[ 543.560683] env[62619]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62619) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 543.561033] env[62619]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62619) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 543.561169] env[62619]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62619) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 543.561466] env[62619]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 543.655540] env[62619]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62619) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}}
[ 543.666442] env[62619]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=62619) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}}
[ 544.266532] env[62619]: INFO nova.virt.driver [None req-0f6e16fb-8753-4a93-9808-3932496c664f None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 544.336339] env[62619]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 544.336493] env[62619]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 544.336577] env[62619]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62619) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 547.578944] env[62619]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-31877ccb-54c5-4e43-bc9e-94683dbd28d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 547.595314] env[62619]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62619) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 547.595503] env[62619]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-a9b4f1e8-1b45-46e5-a4d1-b9bcf93919e2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 547.627798] env[62619]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 81e49.
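A minimal sketch of the two library calls that produce the DEBUG lines above, assuming the stock os-vif and oslo.concurrency public APIs rather than Nova's actual start-up code:

    import os_vif
    from oslo_concurrency import processutils

    # os_vif.initialize() loads every VIF plugin registered under the
    # 'os_vif' entry-point namespace (linux_bridge, noop and ovs above).
    os_vif.initialize()

    # processutils.execute() runs a subprocess and returns (stdout, stderr);
    # it emits the 'Running cmd' / 'CMD ... returned' DEBUG pair seen above.
    out, err = processutils.execute('grep', '-F', 'node.session.scan',
                                    '/sbin/iscsiadm')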
[ 547.628086] env[62619]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.291s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 547.628515] env[62619]: INFO nova.virt.vmwareapi.driver [None req-0f6e16fb-8753-4a93-9808-3932496c664f None None] VMware vCenter version: 7.0.3
[ 547.632163] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23500228-857a-4419-a3b2-af331acdea39 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 547.651276] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c20f5fa-94ac-4ebb-ac3c-1b9849d981a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 547.657729] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f9b2c9-ab3b-4ab8-8deb-2bfcaffc1beb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 547.664609] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84855f1b-ac29-4520-afcf-b7d698daedc5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 547.678107] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259e4a3f-e60d-48e0-9ae8-29bff4de3860 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 547.684521] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b4569f-c09e-4bd3-9cd4-faa70d2bbb04 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 547.715190] env[62619]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-d6ef42df-f32b-4f52-bfc5-6f64d81e8c9a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 547.720631] env[62619]: DEBUG nova.virt.vmwareapi.driver [None req-0f6e16fb-8753-4a93-9808-3932496c664f None None] Extension org.openstack.compute already exists. {{(pid=62619) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 547.723361] env[62619]: INFO nova.compute.provider_config [None req-0f6e16fb-8753-4a93-9808-3932496c664f None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
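The 'Acquiring lock / acquired / "released"' trio around _create_session above is emitted by oslo.concurrency's named-lock wrapper (the `inner` frames in lockutils.py). A sketch of the same pattern using the public lockutils API, not oslo.vmware's internals:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('oslo_vmware_api_lock')
    def _create_session():
        # Runs with the named lock held; the decorator's inner wrapper logs
        # how long the caller waited for the lock and how long it held it.
        pass

    # Equivalent context-manager form:
    with lockutils.lock('oslo_vmware_api_lock'):
        pass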
[ 548.227108] env[62619]: DEBUG nova.context [None req-0f6e16fb-8753-4a93-9808-3932496c664f None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),46403131-4468-41b2-9ba6-5902360b2666(cell1) {{(pid=62619) load_cells /opt/stack/nova/nova/context.py:464}} [ 548.230109] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.230359] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.231041] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.231474] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] Acquiring lock "46403131-4468-41b2-9ba6-5902360b2666" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.231729] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] Lock "46403131-4468-41b2-9ba6-5902360b2666" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.232776] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] Lock "46403131-4468-41b2-9ba6-5902360b2666" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.252879] env[62619]: INFO dbcounter [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] Registered counter for database nova_cell0 [ 548.261226] env[62619]: INFO dbcounter [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] Registered counter for database nova_cell1 [ 548.264640] env[62619]: DEBUG oslo_db.sqlalchemy.engines [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62619) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 548.265059] env[62619]: DEBUG oslo_db.sqlalchemy.engines [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62619) _check_effective_sql_mode 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 548.269862] env[62619]: ERROR nova.db.main.api [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.269862] env[62619]: result = function(*args, **kwargs) [ 548.269862] env[62619]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 548.269862] env[62619]: return func(*args, **kwargs) [ 548.269862] env[62619]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 548.269862] env[62619]: result = fn(*args, **kwargs) [ 548.269862] env[62619]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 548.269862] env[62619]: return f(*args, **kwargs) [ 548.269862] env[62619]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version [ 548.269862] env[62619]: return db.service_get_minimum_version(context, binaries) [ 548.269862] env[62619]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 548.269862] env[62619]: _check_db_access() [ 548.269862] env[62619]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 548.269862] env[62619]: stacktrace = ''.join(traceback.format_stack()) [ 548.269862] env[62619]: [ 548.270903] env[62619]: ERROR nova.db.main.api [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.270903] env[62619]: result = function(*args, **kwargs) [ 548.270903] env[62619]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 548.270903] env[62619]: return func(*args, **kwargs) [ 548.270903] env[62619]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 548.270903] env[62619]: result = fn(*args, **kwargs) [ 548.270903] env[62619]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 548.270903] env[62619]: return f(*args, **kwargs) [ 548.270903] env[62619]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version [ 548.270903] env[62619]: return db.service_get_minimum_version(context, binaries) [ 548.270903] env[62619]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 548.270903] env[62619]: _check_db_access() [ 548.270903] env[62619]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 548.270903] env[62619]: stacktrace = ''.join(traceback.format_stack()) [ 548.270903] env[62619]: [ 548.271310] env[62619]: WARNING nova.objects.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] Failed to get minimum service version for cell 46403131-4468-41b2-9ba6-5902360b2666 [ 548.271467] env[62619]: WARNING nova.objects.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000 [ 548.271921] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] Acquiring lock "singleton_lock" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.272088] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] Acquired lock "singleton_lock" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
548.272322] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] Releasing lock "singleton_lock" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.272685] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] Full set of CONF: {{(pid=62619) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 548.272830] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ******************************************************************************** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 548.272954] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] Configuration options gathered from: {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 548.273099] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 548.273294] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 548.273443] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ================================================================================ {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 548.273664] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] allow_resize_to_same_host = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.273830] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] arq_binding_timeout = 300 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.273961] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] backdoor_port = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.274098] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] backdoor_socket = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.274264] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] block_device_allocate_retries = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.274422] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] block_device_allocate_retries_interval = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.274590] env[62619]: DEBUG 
oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cert = self.pem {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.274749] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.274912] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] compute_monitors = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.275085] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] config_dir = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.275255] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] config_drive_format = iso9660 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.275384] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.275547] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] config_source = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.275708] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] console_host = devstack {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.275867] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] control_exchange = nova {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.276030] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cpu_allocation_ratio = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.276191] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] daemon = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.276356] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] debug = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.276510] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] default_access_ip_network_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.276669] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] default_availability_zone = nova {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.276819] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] default_ephemeral_format = 
None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.276973] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] default_green_pool_size = 1000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.277240] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.277406] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] default_schedule_zone = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.277562] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] disk_allocation_ratio = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.277720] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] enable_new_services = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.277892] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] enabled_apis = ['osapi_compute'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.278064] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] enabled_ssl_apis = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.278226] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] flat_injected = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.278379] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] force_config_drive = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.278539] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] force_raw_images = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.278703] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] graceful_shutdown_timeout = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.278859] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] heal_instance_info_cache_interval = 60 {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.279100] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] host = cpu-1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.279276] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.279439] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.279604] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.279817] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.279978] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] instance_build_timeout = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.280152] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] instance_delete_interval = 300 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.280318] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] instance_format = [instance: %(uuid)s] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.280482] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] instance_name_template = instance-%08x {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.280642] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] instance_usage_audit = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.280806] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] instance_usage_audit_period = month {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.280966] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.281144] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] instances_path = /opt/stack/data/nova/instances {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.281305] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] internal_service_availability_zone = internal {{(pid=62619) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.281459] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] key = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.281615] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] live_migration_retry_count = 30 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.281781] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] log_color = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.281939] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] log_config_append = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.282117] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.282274] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] log_dir = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.282459] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] log_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.282588] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] log_options = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.282749] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] log_rotate_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.282914] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] log_rotate_interval_type = days {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.283123] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] log_rotation_type = none {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.283280] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.283435] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.283639] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.283817] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.283945] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.284118] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] long_rpc_timeout = 1800 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.284280] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] max_concurrent_builds = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.284436] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] max_concurrent_live_migrations = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.284592] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] max_concurrent_snapshots = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.284752] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] max_local_block_devices = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.284898] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] max_logfile_count = 30 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.285065] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] max_logfile_size_mb = 200 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.285225] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] maximum_instance_delete_attempts = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.285391] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] metadata_listen = 0.0.0.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.285556] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] metadata_listen_port = 8775 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.285720] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] metadata_workers = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.285878] env[62619]: DEBUG oslo_service.service 
[None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] migrate_max_retries = -1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.286054] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] mkisofs_cmd = genisoimage {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.286270] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.286401] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] my_ip = 10.180.1.21 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.286604] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.286763] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] network_allocate_retries = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.286938] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.287122] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.287284] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] osapi_compute_listen_port = 8774 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.287456] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] osapi_compute_unique_server_name_scope = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.287653] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] osapi_compute_workers = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.287820] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] password_length = 12 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.287978] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] periodic_enable = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.288154] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] periodic_fuzzy_delay = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.288320] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] pointer_model = usbtablet 
{{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.288484] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] preallocate_images = none {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.288646] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] publish_errors = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.288774] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] pybasedir = /opt/stack/nova {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.288926] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ram_allocation_ratio = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.289098] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] rate_limit_burst = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.289266] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] rate_limit_except_level = CRITICAL {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.289423] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] rate_limit_interval = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.289583] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] reboot_timeout = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.289740] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] reclaim_instance_interval = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.289895] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] record = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.290071] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] reimage_timeout_per_gb = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.290243] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] report_interval = 120 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.290402] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] rescue_timeout = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.290560] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] reserved_host_cpus = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.290716] env[62619]: DEBUG oslo_service.service [None 
req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] reserved_host_disk_mb = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.290872] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] reserved_host_memory_mb = 512 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.291040] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] reserved_huge_pages = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.291203] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] resize_confirm_window = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.291362] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] resize_fs_using_block_device = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.291522] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] resume_guests_state_on_host_boot = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.291688] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.291850] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] rpc_response_timeout = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.292015] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] run_external_periodic_tasks = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.292191] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] running_deleted_instance_action = reap {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.292406] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.292538] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] running_deleted_instance_timeout = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.292699] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] scheduler_instance_sync_interval = 120 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.292867] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] service_down_time = 720 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.293044] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] 
servicegroup_driver = db {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.293202] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] shell_completion = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.293375] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] shelved_offload_time = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.293550] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] shelved_poll_interval = 3600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.293717] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] shutdown_timeout = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.293876] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] source_is_ipv6 = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.294042] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ssl_only = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.294329] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.294502] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] sync_power_state_interval = 600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.294664] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] sync_power_state_pool_size = 1000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.294873] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] syslog_log_facility = LOG_USER {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.294982] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] tempdir = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.295183] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] timeout_nbd = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.295361] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] transport_url = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.295523] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] update_resources_interval = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.295681] env[62619]: DEBUG 
oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] use_cow_images = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.295837] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] use_eventlog = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.295991] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] use_journal = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.296162] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] use_json = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.296320] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] use_rootwrap_daemon = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.296485] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] use_stderr = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.296656] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] use_syslog = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.296807] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vcpu_pin_set = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.296971] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vif_plugging_is_fatal = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.297158] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vif_plugging_timeout = 300 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.297326] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] virt_mkfs = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.297483] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] volume_usage_poll_interval = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.297641] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] watch_log_file = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.297805] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] web = /usr/share/spice-html5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 548.297987] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.298192] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.298361] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.298533] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_concurrency.disable_process_locking = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.299730] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.299930] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.300119] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.300299] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.300500] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.300675] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.300859] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.auth_strategy = keystone {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.301041] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.compute_link_prefix = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.301224] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.301396] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.dhcp_domain = novalocal {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
548.301567] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.enable_instance_password = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.301729] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.glance_link_prefix = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.301892] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.302073] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.302241] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.instance_list_per_project_cells = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.302431] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.list_records_by_skipping_down_cells = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.302596] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.local_metadata_per_cell = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.302763] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.max_limit = 1000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.302929] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.metadata_cache_expiration = 15 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.303114] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.neutron_default_tenant_id = default {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.303291] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.response_validation = warn {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.303500] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.use_neutron_default_nets = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.303683] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.303847] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.304023] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.304201] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.304371] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.vendordata_dynamic_targets = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.304531] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.vendordata_jsonfile_path = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.304716] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.304910] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.backend = dogpile.cache.memcached {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.305089] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.backend_argument = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.305250] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.backend_expiration_time = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.305418] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.config_prefix = cache.oslo {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.305591] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.dead_timeout = 60.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.305744] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.debug_cache_backend = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.305900] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.enable_retry_client = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.306070] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.enable_socket_keepalive = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.306242] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.enabled = True {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.306410] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.enforce_fips_mode = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.306648] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.expiration_time = 600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.306829] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.hashclient_retry_attempts = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.306998] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.307179] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.memcache_dead_retry = 300 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.307342] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.memcache_password = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.307503] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.307663] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.307822] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.memcache_pool_maxsize = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.307980] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.308157] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.memcache_sasl_enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.308333] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.308499] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.308659] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.memcache_username = None {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.308825] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.proxies = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.308987] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.redis_db = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.309162] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.redis_password = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.309335] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.309534] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.309723] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.redis_server = localhost:6379 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.309891] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.redis_socket_timeout = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.310065] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.redis_username = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.310236] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.retry_attempts = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.310405] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.retry_delay = 0.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.310566] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.socket_keepalive_count = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.310728] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.socket_keepalive_idle = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.310888] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.socket_keepalive_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.311057] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.tls_allowed_ciphers = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.311217] 
env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.tls_cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.311371] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.tls_certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.311533] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.tls_enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.311687] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cache.tls_keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.311857] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cinder.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.312041] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cinder.auth_type = password {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.312206] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cinder.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.312403] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.312600] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cinder.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.312775] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cinder.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.312938] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cinder.cross_az_attach = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.313114] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cinder.debug = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.313276] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cinder.endpoint_template = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.313462] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cinder.http_retries = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.313605] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cinder.insecure = False {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.313761] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cinder.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.313931] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cinder.os_region_name = RegionOne {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.314106] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cinder.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.314269] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cinder.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.314440] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.314603] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] compute.cpu_dedicated_set = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.314783] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] compute.cpu_shared_set = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.314958] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] compute.image_type_exclude_list = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.315139] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.315301] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.315491] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.315697] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.315874] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.316058] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] compute.resource_provider_association_refresh = 
300 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.316225] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.316386] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] compute.shutdown_retry_interval = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.316569] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.316747] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] conductor.workers = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.316922] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] console.allowed_origins = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.317108] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] console.ssl_ciphers = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.317294] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] console.ssl_minimum_version = default {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.317465] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] consoleauth.enforce_session_timeout = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.317635] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] consoleauth.token_ttl = 600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.317798] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.317953] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.318131] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.318289] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.connect_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.318446] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.connect_retry_delay = None {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.318635] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.endpoint_override = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.318802] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.318961] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.319134] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.max_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.319292] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.min_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.319448] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.region_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.319605] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.retriable_status_codes = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.319759] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.319926] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.service_type = accelerator {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.320099] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.320260] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.status_code_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.320418] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.status_code_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.320573] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.320748] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.320938] env[62619]: 
DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] cyborg.version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.321148] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.backend = sqlalchemy {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.321322] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.connection = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.321492] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.connection_debug = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.321690] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.connection_parameters = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.321860] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.connection_recycle_time = 3600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.322035] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.connection_trace = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.322203] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.db_inc_retry_interval = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.322401] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.db_max_retries = 20 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.322652] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.db_max_retry_interval = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.322952] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.db_retry_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.323223] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.max_overflow = 50 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.323420] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.max_pool_size = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.323585] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.max_retries = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.323757] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] 
database.mysql_sql_mode = TRADITIONAL {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.323917] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.mysql_wsrep_sync_wait = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.324124] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.pool_timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.324303] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.retry_interval = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.324461] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.slave_connection = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.324624] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.sqlite_synchronous = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.324784] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] database.use_db_reconnect = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.324962] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.backend = sqlalchemy {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.325147] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.connection = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.325312] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.connection_debug = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.325478] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.connection_parameters = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.325673] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.connection_recycle_time = 3600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.325864] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.connection_trace = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.326043] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.db_inc_retry_interval = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.326214] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.db_max_retries = 20 
{{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.326376] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.db_max_retry_interval = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.326539] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.db_retry_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.326695] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.max_overflow = 50 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.326855] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.max_pool_size = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.327030] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.max_retries = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.327207] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.327369] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.327529] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.pool_timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.327689] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.retry_interval = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.327846] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.slave_connection = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.328013] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] api_database.sqlite_synchronous = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.328196] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] devices.enabled_mdev_types = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.328372] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.328553] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ephemeral_storage_encryption.default_format = luks 
{{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.328729] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ephemeral_storage_encryption.enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.328901] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.329084] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.api_servers = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.329250] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.329410] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.329572] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.329728] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.connect_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.329881] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.connect_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.330051] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.debug = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.330216] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.default_trusted_certificate_ids = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.330376] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.enable_certificate_validation = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.330536] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.enable_rbd_download = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.330690] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.endpoint_override = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.330850] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.insecure = False {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.331014] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.331180] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.max_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.331333] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.min_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.331491] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.num_retries = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.331668] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.rbd_ceph_conf = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.331846] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.rbd_connect_timeout = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.332026] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.rbd_pool = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.332215] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.rbd_user = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.332396] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.region_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.332566] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.retriable_status_codes = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.332725] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.332899] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.service_type = image {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.333075] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.333236] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.status_code_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.333394] env[62619]: DEBUG oslo_service.service [None 
req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.status_code_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.333552] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.333729] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.333892] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.verify_glance_signatures = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.334061] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] glance.version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.334232] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] guestfs.debug = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.334398] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.334586] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.auth_type = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.334741] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.334912] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.335089] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.335252] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.connect_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.335409] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.connect_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.335571] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.endpoint_override = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.335733] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.insecure = False {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.335889] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.336058] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.max_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.336217] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.min_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.336373] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.region_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.336529] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.retriable_status_codes = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.336683] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.336849] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.service_type = shared-file-system {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.337019] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.share_apply_policy_timeout = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.337185] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.337345] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.status_code_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.337502] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.status_code_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.337666] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.337879] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.338055] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] manila.version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.338228] 
env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] mks.enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.338598] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.338792] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] image_cache.manager_interval = 2400 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.338960] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] image_cache.precache_concurrency = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.339146] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] image_cache.remove_unused_base_images = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.339317] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.339483] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.339662] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] image_cache.subdirectory_name = _base {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.339839] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.api_max_retries = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.340010] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.api_retry_interval = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.340177] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.340371] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.auth_type = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.340529] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.340689] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.340853] env[62619]: DEBUG 
oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.341023] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.conductor_group = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.341185] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.connect_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.341342] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.connect_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.341499] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.endpoint_override = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.341660] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.341816] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.341975] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.max_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.342146] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.min_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.342312] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.peer_list = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.342501] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.region_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.342667] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.retriable_status_codes = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.342831] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.serial_console_state_timeout = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.342989] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.343183] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.service_type = baremetal 
{{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.343335] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.shard = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.343543] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.343716] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.status_code_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.343874] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.status_code_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.344041] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.344228] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.344387] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ironic.version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.344569] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.344740] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] key_manager.fixed_key = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.344918] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.345098] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.barbican_api_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.345258] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.barbican_endpoint = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.345427] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.barbican_endpoint_type = public {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.345612] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.barbican_region_name = None {{(pid=62619) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.345778] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.345937] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.346120] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.346284] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.346462] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.346643] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.number_of_retries = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.346805] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.retry_delay = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.346966] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.send_service_user_token = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.347144] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.347301] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.347462] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.verify_ssl = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.347619] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican.verify_ssl_path = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.347781] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican_service_user.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.347943] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican_service_user.auth_type = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.348114] 
env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican_service_user.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.348270] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican_service_user.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.348432] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican_service_user.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.348590] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican_service_user.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.348745] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican_service_user.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.348904] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican_service_user.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.349071] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] barbican_service_user.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.349241] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vault.approle_role_id = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.349398] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vault.approle_secret_id = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.349603] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vault.kv_mountpoint = secret {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.349772] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vault.kv_path = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.349937] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vault.kv_version = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.350112] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vault.namespace = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.350274] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vault.root_token_id = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.350431] env[62619]: DEBUG oslo_service.service [None 
req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vault.ssl_ca_crt_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.350600] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vault.timeout = 60.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.350761] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vault.use_ssl = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.350930] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.351109] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.351268] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.351432] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.351589] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.connect_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.351742] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.connect_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.351896] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.endpoint_override = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.352067] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.352226] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.352396] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.max_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.352585] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.min_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.352752] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.region_name = None {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.352910] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.retriable_status_codes = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.353080] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.353251] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.service_type = identity {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.353417] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.353580] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.status_code_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.353735] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.status_code_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.353890] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.354081] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.354244] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] keystone.version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.354436] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.ceph_mount_options = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.354762] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.354940] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.connection_uri = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.355114] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.cpu_mode = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.355281] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.cpu_model_extra_flags = [] {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.355446] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.cpu_models = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.355643] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.cpu_power_governor_high = performance {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.355816] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.355984] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.cpu_power_management = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.356171] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.356342] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.device_detach_attempts = 8 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.356604] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.device_detach_timeout = 20 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.356825] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.disk_cachemodes = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.357009] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.disk_prefix = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.357191] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.enabled_perf_events = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.357360] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.file_backed_memory = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.357530] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.gid_maps = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.357688] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.hw_disk_discard = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.357843] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.hw_machine_type = None {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.358020] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.images_rbd_ceph_conf = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.358191] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.358352] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.358520] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.images_rbd_glance_store_name = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.358683] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.images_rbd_pool = rbd {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.358849] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.images_type = default {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.359008] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.images_volume_group = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.359177] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.inject_key = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.359337] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.inject_partition = -2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.359494] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.inject_password = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.359699] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.iscsi_iface = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.359873] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.iser_use_multipath = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.360048] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.360228] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.360390] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.live_migration_downtime = 500 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.360551] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.360708] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.360868] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.live_migration_inbound_addr = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.361040] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.361206] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.361364] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.live_migration_scheme = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.361533] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.live_migration_timeout_action = abort {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.361694] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.live_migration_tunnelled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.361850] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.live_migration_uri = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.362014] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.live_migration_with_native_tls = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.362179] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.max_queues = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.362340] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.362620] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] 
libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.362803] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.nfs_mount_options = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.363131] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.363320] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.363509] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.363686] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.363851] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.364025] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.num_pcie_ports = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.364196] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.364361] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.pmem_namespaces = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.364519] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.quobyte_client_cfg = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.364834] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.365022] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.365190] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.365353] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 
None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.365514] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.rbd_secret_uuid = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.365691] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.rbd_user = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.365868] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.366051] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.366215] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.rescue_image_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.366372] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.rescue_kernel_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.366531] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.rescue_ramdisk_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.366698] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.366854] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.rx_queue_size = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.367102] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.smbfs_mount_options = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.367316] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.367495] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.snapshot_compression = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.367657] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.snapshot_image_format = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.367876] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] 
libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.368053] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.sparse_logical_volumes = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.368219] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.swtpm_enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.368386] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.swtpm_group = tss {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.368550] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.swtpm_user = tss {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.368736] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.sysinfo_serial = unique {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.368903] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.tb_cache_size = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.369072] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.tx_queue_size = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.369238] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.uid_maps = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.369399] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.use_virtio_for_bridges = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.369568] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.virt_type = kvm {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.369729] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.volume_clear = zero {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.369888] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.volume_clear_size = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.370064] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.volume_use_multipath = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.370224] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.vzstorage_cache_path = None {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.370389] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.370558] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.vzstorage_mount_group = qemu {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.370720] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.370884] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.371193] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.371376] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.vzstorage_mount_user = stack {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.371543] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.371756] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.371942] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.auth_type = password {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.372117] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.372278] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.372484] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.372650] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.connect_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.372841] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.connect_retry_delay = None {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.373033] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.default_floating_pool = public {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.373198] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.endpoint_override = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.373364] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.extension_sync_interval = 600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.373525] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.http_retries = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.373687] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.373843] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.373998] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.max_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.374181] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.374338] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.min_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.374503] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.ovs_bridge = br-int {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.374694] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.physnets = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.374912] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.region_name = RegionOne {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.375096] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.retriable_status_codes = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.375270] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.service_metadata_proxy = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.375433] 
env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.375596] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.service_type = network {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.375756] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.375911] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.status_code_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.376079] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.status_code_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.376244] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.376424] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.376583] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] neutron.version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.376794] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] notifications.bdms_in_notifications = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.376978] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] notifications.default_level = INFO {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.377169] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] notifications.notification_format = unversioned {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.377333] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] notifications.notify_on_state_change = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.377505] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.377693] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] pci.alias = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.377882] 
env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] pci.device_spec = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.378061] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] pci.report_in_placement = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.378237] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.378409] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.auth_type = password {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.378575] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.378731] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.378886] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.379059] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.379220] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.connect_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.379376] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.connect_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.379536] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.default_domain_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.379691] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.default_domain_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.379865] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.domain_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.380051] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.domain_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.380215] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 
None None] placement.endpoint_override = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.380407] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.380578] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.380737] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.max_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.380890] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.min_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.381069] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.password = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.381230] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.project_domain_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.381399] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.project_domain_name = Default {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.381564] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.project_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.381734] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.project_name = service {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.381899] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.region_name = RegionOne {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.382070] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.retriable_status_codes = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.382545] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.382545] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.service_type = placement {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.382621] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.split_loggers = False {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.382727] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.status_code_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.382884] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.status_code_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.383053] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.system_scope = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.383215] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.383371] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.trust_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.383593] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.user_domain_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.383779] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.user_domain_name = Default {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.383941] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.user_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.384129] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.username = nova {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.384314] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.384474] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] placement.version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.384651] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] quota.cores = 20 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.384813] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] quota.count_usage_from_placement = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.384986] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} 
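Each log_opt_values frame in this dump is oslo.config enumerating the registered options of one group at DEBUG level during service startup, with anything registered as secret rendered as '****' (as for key_manager.fixed_key and placement.password above). The following is a minimal, self-contained sketch of that mechanism, not Nova's actual startup code: it registers an illustrative subset of the [libvirt] options by hand and then calls the same ConfigOpts.log_opt_values() referenced in every frame.

# Minimal sketch (not Nova's actual startup code) of how oslo.config's
# ConfigOpts.log_opt_values() emits the "<group>.<option> = <value>" DEBUG
# lines seen in this log. The option registrations below are an illustrative
# subset of [libvirt]; Nova defines the real ones in its nova.conf package.
import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

libvirt_opts = [
    cfg.StrOpt('virt_type', default='kvm'),
    cfg.IntOpt('mem_stats_period_seconds', default=10),
    # secret=True makes log_opt_values() print '****' instead of the value.
    cfg.StrOpt('rbd_secret_uuid', secret=True),
]

CONF = cfg.CONF
CONF.register_opts(libvirt_opts, group='libvirt')

if __name__ == '__main__':
    CONF([], project='example')              # parse (empty) CLI/config sources
    CONF.log_opt_values(LOG, logging.DEBUG)  # one DEBUG line per registered option

When the service is started with its real configuration files, the same call walks every registered group, which is why the dump above runs through ironic, barbican, vault, keystone, libvirt, neutron, placement, quota, scheduler and filter_scheduler in sequence.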
[ 548.385169] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] quota.injected_file_content_bytes = 10240 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.385334] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] quota.injected_file_path_length = 255 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.385497] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] quota.injected_files = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.385664] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] quota.instances = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.385827] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] quota.key_pairs = 100 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.385988] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] quota.metadata_items = 128 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.386166] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] quota.ram = 51200 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.386328] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] quota.recheck_quota = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.386517] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] quota.server_group_members = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.386696] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] quota.server_groups = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.386906] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.387092] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] quota.unified_limits_resource_strategy = require {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.387266] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.387428] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.387589] env[62619]: DEBUG oslo_service.service 
[None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] scheduler.image_metadata_prefilter = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.387747] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.387905] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] scheduler.max_attempts = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.388076] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] scheduler.max_placement_results = 1000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.388240] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.388402] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.388562] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.388731] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] scheduler.workers = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.388900] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.389080] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.389268] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.389445] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.389634] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.389803] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.389966] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.390168] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.390337] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.host_subset_size = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.390501] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.390658] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.390815] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.390975] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.isolated_hosts = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.391148] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.isolated_images = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.391308] env[62619]: DEBUG oslo_service.service [None 
req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.391465] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.391627] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.391786] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.pci_in_placement = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.391989] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.392171] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.392332] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.392517] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.392683] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.392841] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.392997] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.track_instance_changes = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.393186] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.393371] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] metrics.required = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.393548] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] metrics.weight_multiplier = 1.0 
{{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.393706] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.393865] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] metrics.weight_setting = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.394204] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.394382] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] serial_console.enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.394560] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] serial_console.port_range = 10000:20000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.394727] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.394895] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.395071] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] serial_console.serialproxy_port = 6083 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.395240] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] service_user.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.395409] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] service_user.auth_type = password {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.395600] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] service_user.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.395777] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] service_user.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.395942] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] service_user.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.396116] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] service_user.insecure = False {{(pid=62619) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.396275] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] service_user.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.396445] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] service_user.send_service_user_token = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.396616] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] service_user.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.396782] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] service_user.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.396951] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] spice.agent_enabled = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.397127] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] spice.enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.397440] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.397650] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.397820] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] spice.html5proxy_port = 6082 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.397981] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] spice.image_compression = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.398155] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] spice.jpeg_compression = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.398313] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] spice.playback_compression = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.398475] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] spice.require_secure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.398642] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] spice.server_listen = 127.0.0.1 {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.398806] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.398963] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] spice.streaming_mode = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.399132] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] spice.zlib_compression = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.399296] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] upgrade_levels.baseapi = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.399462] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] upgrade_levels.compute = auto {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.399621] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] upgrade_levels.conductor = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.399777] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] upgrade_levels.scheduler = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.399938] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.400109] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.400270] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vendordata_dynamic_auth.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.400423] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vendordata_dynamic_auth.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.400585] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.400743] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vendordata_dynamic_auth.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.400898] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.401070] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.401228] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vendordata_dynamic_auth.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.401399] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.api_retry_count = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.401556] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.ca_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.401723] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.401887] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.cluster_name = testcl1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.402060] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.connection_pool_size = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.402220] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.console_delay_seconds = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.402416] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.datastore_regex = ^datastore.* {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.402649] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.402825] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.host_password = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.402992] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.host_port = 443 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.403176] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.host_username = administrator@vsphere.local {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.403367] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.insecure = True {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.403558] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.integration_bridge = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.403735] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.maximum_objects = 100 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.403894] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.pbm_default_policy = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.404067] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.pbm_enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.404228] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.pbm_wsdl_location = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.404393] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.404548] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.serial_port_proxy_uri = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.404702] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.serial_port_service_uri = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.404865] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.task_poll_interval = 0.5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.405047] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.use_linked_clone = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.405220] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.vnc_keymap = en-us {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.405388] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.vnc_port = 5900 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.405563] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vmware.vnc_port_total = 10000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.405817] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vnc.auth_schemes = ['none'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.406015] 
env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vnc.enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.406342] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.406531] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.406700] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vnc.novncproxy_port = 6080 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.406888] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vnc.server_listen = 127.0.0.1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.407096] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.407265] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vnc.vencrypt_ca_certs = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.407423] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vnc.vencrypt_client_cert = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.407657] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vnc.vencrypt_client_key = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.407760] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.407921] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.disable_deep_image_inspection = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.408091] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.408254] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.408412] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.408574] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.disable_rootwrap = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.408731] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.enable_numa_live_migration = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.408888] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.409057] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.409219] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.409378] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.libvirt_disable_apic = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.409539] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.409699] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.409859] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.410030] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.410198] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.410361] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.410522] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.410678] 
env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.410836] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.411007] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.411198] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.411366] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] wsgi.client_socket_timeout = 900 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.411531] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] wsgi.default_pool_size = 1000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.411695] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] wsgi.keep_alive = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.411856] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] wsgi.max_header_line = 16384 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.412026] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.412192] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] wsgi.ssl_ca_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.412375] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] wsgi.ssl_cert_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.412535] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] wsgi.ssl_key_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.412702] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] wsgi.tcp_keepidle = 600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.412872] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.413048] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] zvm.ca_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.413212] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] zvm.cloud_connector_url = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.413590] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.413772] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] zvm.reachable_timeout = 300 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.413946] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.414139] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.414318] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] profiler.connection_string = messaging:// {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.414482] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] profiler.enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.414650] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] profiler.es_doc_type = notification {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.414809] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] profiler.es_scroll_size = 10000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.414973] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] profiler.es_scroll_time = 2m {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.415150] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] profiler.filter_error_trace = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.415313] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] profiler.hmac_keys = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.415474] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] profiler.sentinel_service_name = mymaster {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.415640] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] profiler.socket_timeout = 0.1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.415798] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] profiler.trace_requests = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.415954] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] profiler.trace_sqlalchemy = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.416139] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] profiler_jaeger.process_tags = {} {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.416295] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] profiler_jaeger.service_name_prefix = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.416454] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] profiler_otlp.service_name_prefix = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.416617] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] remote_debug.host = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.416769] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] remote_debug.port = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.416943] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.417116] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.417277] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.417436] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.417596] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.417781] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False 
{{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.417959] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.418137] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.418295] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.418460] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.418617] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.418782] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.418946] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.419123] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.419290] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.419454] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.419613] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.419780] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.419937] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.420105] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.420270] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.420428] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.420587] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.420747] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.420901] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.421071] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.421233] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.421389] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.421553] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.421722] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.ssl = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.422006] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.422227] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.422422] env[62619]: DEBUG 
oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.422604] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.422773] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.422932] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.423137] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.423307] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_notifications.retry = -1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.423503] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.423676] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.423843] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.424009] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.auth_type = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.424171] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.424326] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.424485] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.424652] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.connect_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
548.424845] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.connect_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.425010] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.endpoint_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.425187] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.endpoint_interface = publicURL {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.425345] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.endpoint_override = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.425514] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.endpoint_region_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.425734] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.endpoint_service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.425908] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.endpoint_service_type = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.426108] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.426304] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.426469] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.max_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.426628] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.min_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.426785] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.region_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.426943] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.retriable_status_codes = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.427121] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.427280] env[62619]: DEBUG 
oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.service_type = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.427441] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.427605] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.status_code_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.427809] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.status_code_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.427992] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.428217] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.valid_interfaces = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.428406] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_limit.version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.428592] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_reports.file_event_handler = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.428758] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.428960] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] oslo_reports.log_dir = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.429163] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.429327] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.429486] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.429652] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.429813] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.429970] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.430151] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.430339] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vif_plug_ovs_privileged.group = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.430513] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.430683] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.430847] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.431052] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] vif_plug_ovs_privileged.user = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.431270] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.431511] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.431763] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.431999] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.432243] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.432476] env[62619]: DEBUG oslo_service.service [None 
req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.432678] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.432850] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.433047] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.433264] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_vif_ovs.isolate_vif = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.433446] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.433614] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.433781] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.433999] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.434203] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] os_vif_ovs.per_port_bridge = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.434379] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] privsep_osbrick.capabilities = [21] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.434548] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] privsep_osbrick.group = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.434760] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] privsep_osbrick.helper_command = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.434944] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.435126] env[62619]: DEBUG 
oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.435286] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] privsep_osbrick.user = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.435496] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.435665] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] nova_sys_admin.group = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.435849] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] nova_sys_admin.helper_command = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.436038] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.436242] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.436427] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] nova_sys_admin.user = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 548.436587] env[62619]: DEBUG oslo_service.service [None req-2b274e06-79cf-4889-af2c-bcdaf9c7f3a0 None None] ******************************************************************************** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 548.437101] env[62619]: INFO nova.service [-] Starting compute node (version 0.1.0) [ 548.940989] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Getting list of instances from cluster (obj){ [ 548.940989] env[62619]: value = "domain-c8" [ 548.940989] env[62619]: _type = "ClusterComputeResource" [ 548.940989] env[62619]: } {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 548.942050] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7703dcdd-d9d2-4dcc-bb9b-5f9b7f39a928 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.951184] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Got total of 0 instances {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 548.951810] env[62619]: WARNING nova.virt.vmwareapi.driver [None req-955626d3-32cb-4855-babc-802921d6a887 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. 
It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 548.952309] env[62619]: INFO nova.virt.node [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Generated node identity e814b747-ed75-487b-a97d-acf66bc6db0b [ 548.952572] env[62619]: INFO nova.virt.node [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Wrote node identity e814b747-ed75-487b-a97d-acf66bc6db0b to /opt/stack/data/n-cpu-1/compute_id [ 549.455583] env[62619]: WARNING nova.compute.manager [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Compute nodes ['e814b747-ed75-487b-a97d-acf66bc6db0b'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 550.461060] env[62619]: INFO nova.compute.manager [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 551.466083] env[62619]: WARNING nova.compute.manager [None req-955626d3-32cb-4855-babc-802921d6a887 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 551.466427] env[62619]: DEBUG oslo_concurrency.lockutils [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.466608] env[62619]: DEBUG oslo_concurrency.lockutils [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.466756] env[62619]: DEBUG oslo_concurrency.lockutils [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.466907] env[62619]: DEBUG nova.compute.resource_tracker [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 551.467841] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d85b05-f615-424d-ab4f-b4f919e858a0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.475773] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc71f864-cc3f-4261-814d-1e8b10eb8548 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.489486] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a844c929-8471-498c-bcf3-159bd0ea444b {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.495972] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326b4341-90ad-478d-9ddb-eabf0a176950 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.525160] env[62619]: DEBUG nova.compute.resource_tracker [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181376MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 551.525343] env[62619]: DEBUG oslo_concurrency.lockutils [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.525542] env[62619]: DEBUG oslo_concurrency.lockutils [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.027692] env[62619]: WARNING nova.compute.resource_tracker [None req-955626d3-32cb-4855-babc-802921d6a887 None None] No compute node record for cpu-1:e814b747-ed75-487b-a97d-acf66bc6db0b: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host e814b747-ed75-487b-a97d-acf66bc6db0b could not be found. [ 552.531731] env[62619]: INFO nova.compute.resource_tracker [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: e814b747-ed75-487b-a97d-acf66bc6db0b [ 554.040632] env[62619]: DEBUG nova.compute.resource_tracker [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 554.041027] env[62619]: DEBUG nova.compute.resource_tracker [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 554.228119] env[62619]: INFO nova.scheduler.client.report [None req-955626d3-32cb-4855-babc-802921d6a887 None None] [req-0dc26234-2eed-4c9f-8f38-c9f5e52dc907] Created resource provider record via placement API for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
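
The long option dump earlier in the log (the oslo_limit.*, os_vif_ovs.*, and privsep group settings, ending at the row of asterisks) is what oslo.config's log_opt_values() emits when the service starts. The sketch below is illustrative only: it re-registers two os_vif_ovs options by hand in a standalone script, with names and defaults copied from the dump; the authoritative option definitions live in os-vif and Nova, not here.

    import logging

    from oslo_config import cfg

    CONF = cfg.CONF
    LOG = logging.getLogger(__name__)

    # Names and defaults taken from the dump above; registered here only so
    # the demo has something to print.
    ovs_group = cfg.OptGroup('os_vif_ovs')
    CONF.register_group(ovs_group)
    CONF.register_opts(
        [
            cfg.IntOpt('network_device_mtu', default=1500),
            cfg.StrOpt('ovsdb_connection', default='tcp:127.0.0.1:6640'),
        ],
        group=ovs_group,
    )

    if __name__ == '__main__':
        logging.basicConfig(level=logging.DEBUG)
        CONF([], project='demo')                 # parse (empty) command line / config files
        CONF.log_opt_values(LOG, logging.DEBUG)  # one DEBUG line per registered option

Each option comes out as group.option = value, with sensitive values such as transport_url masked as ****, which is the shape of the block above.
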
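
The Hypervisor/Node resource view and Final resource view entries just above, together with the inventory dictionary reported to Placement a few entries further on, fit together arithmetically: for each resource class, Placement treats (total - reserved) * allocation_ratio as the schedulable capacity. A small sketch with the logged values follows; the dictionary layout is copied from the log, while the helper function is illustrative and not Nova code.

    # Inventory as reported for provider e814b747-ed75-487b-a97d-acf66bc6db0b.
    INVENTORY = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                      'min_unit': 1, 'max_unit': 16,    'step_size': 1},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                      'min_unit': 1, 'max_unit': 65530, 'step_size': 1},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                      'min_unit': 1, 'max_unit': 165,   'step_size': 1},
    }

    def schedulable_capacity(inventory):
        # (total - reserved) * allocation_ratio per resource class.
        return {rc: (inv['total'] - inv['reserved']) * inv['allocation_ratio']
                for rc, inv in inventory.items()}

    # schedulable_capacity(INVENTORY)
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

So the 48 physical vCPUs become 192 schedulable VCPU units under the 4.0 allocation ratio, while memory and disk are not overcommitted.
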
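
The Acquiring lock / acquired / "released" triplets around "compute_resources" above, with their waited and held timings, are emitted by oslo.concurrency's synchronized decorator. A minimal sketch of that usage, assuming a free-standing function rather than the real ResourceTracker method:

    from oslo_concurrency import lockutils

    # Illustrative stand-in for ResourceTracker._update_available_resource.
    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        # Runs with the in-process "compute_resources" lock held; the
        # decorator logs how long the caller waited for the lock and,
        # on exit, how long the function held it.
        pass

    update_available_resource()
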
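
The Running periodic task ComputeManager._* entries that follow (_heal_instance_info_cache, the _poll_* checks, update_available_resource, and so on) are driven by oslo.service's periodic task loop; the real methods live in nova/compute/manager.py. A rough sketch of how such tasks are declared, using a toy manager class rather than Nova's own:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class DemoManager(periodic_task.PeriodicTasks):
        """Toy stand-in for nova.compute.manager.ComputeManager."""

        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)
        def _check_instance_build_time(self, context):
            # Each decorated method shows up as a
            # "Running periodic task ..." DEBUG line when the loop fires.
            pass

    # The service wrapper then calls run_periodic_tasks() on a timer, roughly:
    #   manager = DemoManager()
    #   manager.run_periodic_tasks(context=None)
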
[ 554.245254] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71925d05-b6e6-4888-8c26-a488a214264b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.252932] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158398a9-ab87-4aac-9f90-830323ea82f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.282911] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c07281d-4e14-4032-8e0a-5d07f157f50a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.289477] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a98c657-9fb3-433a-80b4-cc1c7ccaeac4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.302044] env[62619]: DEBUG nova.compute.provider_tree [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 554.872730] env[62619]: DEBUG nova.scheduler.client.report [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 554.872957] env[62619]: DEBUG nova.compute.provider_tree [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 0 to 1 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 554.873108] env[62619]: DEBUG nova.compute.provider_tree [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 554.943844] env[62619]: DEBUG nova.compute.provider_tree [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Updating 
resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 1 to 2 during operation: update_traits {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 555.448451] env[62619]: DEBUG nova.compute.resource_tracker [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 555.448777] env[62619]: DEBUG oslo_concurrency.lockutils [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.923s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.448777] env[62619]: DEBUG nova.service [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Creating RPC server for service compute {{(pid=62619) start /opt/stack/nova/nova/service.py:186}} [ 555.461499] env[62619]: DEBUG nova.service [None req-955626d3-32cb-4855-babc-802921d6a887 None None] Join ServiceGroup membership for this service compute {{(pid=62619) start /opt/stack/nova/nova/service.py:203}} [ 555.461738] env[62619]: DEBUG nova.servicegroup.drivers.db [None req-955626d3-32cb-4855-babc-802921d6a887 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62619) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 608.463205] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 608.463655] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 608.463712] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 608.463841] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 608.967079] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. 
{{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 608.967346] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 608.967535] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 608.967728] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 608.967917] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 608.968112] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 608.968296] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_power_states {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 609.471321] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Getting list of instances from cluster (obj){ [ 609.471321] env[62619]: value = "domain-c8" [ 609.471321] env[62619]: _type = "ClusterComputeResource" [ 609.471321] env[62619]: } {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 609.472533] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90db51da-1cfc-4f40-8f70-14b7e6069ff1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.481027] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Got total of 0 instances {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 609.481267] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 609.481435] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 609.481583] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 609.984293] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.984531] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.984693] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.984845] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 609.985742] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a255841-3416-4262-9abc-177c5c22d282 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.994152] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7accc7-23ac-47cc-b5e6-cdf1d0f3c947 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.008196] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12302b5f-fe00-46a2-8049-ee4fe6f89f1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.014558] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41958ee2-5fb3-4354-8a45-8bf862346fd0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.043326] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181363MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 610.043488] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.043806] 
env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.061657] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 611.061911] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 611.078042] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e696f78-e2e6-4044-9d9a-fee5d995c23c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.083704] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93bec7f5-5129-4183-80f8-74473aa0bcf0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.116026] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9245a140-aaa5-4c72-9368-216d139207bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.121654] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc0b022-b6c0-40b6-8a41-5d57508b214b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.134428] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.641023] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 612.145223] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 612.145223] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.100s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.145223] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 612.145223] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Getting list of instances from cluster (obj){ [ 612.145223] env[62619]: value = "domain-c8" [ 612.145223] env[62619]: _type = "ClusterComputeResource" [ 612.145223] env[62619]: } {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 612.145731] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb78605-9946-4c59-8124-e49d20f5ff40 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.153959] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Got total of 0 instances {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 667.397200] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 667.397507] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 667.903216] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 667.903216] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 667.903216] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 668.406234] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. 
{{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 668.406595] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 668.406769] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 668.406986] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 668.407240] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 668.407458] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 668.407675] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 668.407872] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 668.408154] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 668.912241] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.912509] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.912691] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.912851] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 668.913771] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af45691d-147e-4dc1-afb1-7b322df610fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.922279] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94799679-a8a2-4518-97e0-4896d98f2eaf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.936999] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0627fea0-f1f5-44e5-a0c5-ec9711c2448f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.944606] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa57d4a-3e33-4ec7-a290-155932cdb36f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.975351] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181358MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 668.975593] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.975735] 
env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.996703] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 669.996936] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 670.009410] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2deec6cc-add0-43d5-8552-310f4d1f92f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.017285] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c443779-b856-4543-ba56-8e486f2f5496 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.046710] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6054e24d-ed44-4b5c-84eb-5d160175e6bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.053414] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7358c487-2207-4ecf-9875-a0e81b911d0f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.065995] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.568796] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 670.570069] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 670.570247] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.595s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.571365] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.571834] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.571834] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 730.571834] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 731.075317] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 731.075543] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.075694] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.075845] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.076020] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.076165] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.076322] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.076451] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 731.076587] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.579360] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.579724] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.579763] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.579883] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 731.580784] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1fae1d-02bc-48ff-881b-ed90ca3a62b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.589812] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3919edf-76f2-47b7-9fc7-754ea5579860 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.603147] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e720485f-1368-4501-87c1-a77dbbc21edc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.609242] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd9d2fb-0e80-4ae4-9009-c26f21a94fb6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.637277] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181375MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 731.637410] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.637581] 
env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.655582] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 732.655845] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 732.668365] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb251fa7-e89a-41a1-9f5e-7d0f9edc4ff6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.676256] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56962af-61be-4834-a25a-6ad8a53e75db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.705868] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff57296-0c4a-4e1b-acbf-6ebf742e180e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.712812] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cac6736-a4ba-4666-8f11-b42124c0c376 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.725505] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.228915] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 733.230180] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 733.230376] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.364853] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 790.365238] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 790.868892] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 790.869068] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 790.869189] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 791.372361] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 791.372736] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.372775] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.372958] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.373123] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.373262] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.373427] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.373560] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 791.373701] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.876996] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.877270] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.877437] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.877589] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 791.878507] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32fe18a-e145-4673-b720-bb24724aa338 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.887071] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c8715d-d7b3-4865-91e4-8d9d0e605752 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.901304] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85038a82-1c78-4b7a-8a39-71f5cb591038 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.907640] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3954b8-0028-428b-bea6-2a1a1ba02889 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.935476] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181368MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 791.935617] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.935802] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.953337] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 792.953591] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 792.966703] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c13fb2-d70d-4686-ad0d-11c6ab5cdfff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.974102] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbfab74-af11-43cd-91f4-6239ec57a91e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.005042] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7358af7f-6d33-412a-bc5c-48b00b4a9ce3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.011889] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa0b209-e02d-48fc-a54f-036b662edde8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.024485] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.527310] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 793.528570] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 793.528748] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.712342] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 843.712722] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Cleaning up deleted instances {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11609}} [ 844.216424] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] There are 0 instances to clean {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11618}} [ 844.216691] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 844.216790] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Cleaning up deleted instances with incomplete migration {{(pid=62619) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11647}} [ 844.719878] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.219304] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.219670] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.219670] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 846.709892] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.710076] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 846.710204] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 847.213188] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 847.213593] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 847.709426] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 848.709729] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 848.710111] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 848.710159] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 849.213970] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.214256] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.214423] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.214585] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 849.215634] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735309da-5fb3-4fff-87a9-1395639d81f8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.223837] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0903921-95f2-42fc-bc0a-5d49480b5a86 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.238387] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efc8731-44ee-4f08-acc3-6db4e7e07763 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.244509] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a9bcf5-f092-43b2-8dcf-b0d9e91c20f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.272719] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181386MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 849.272887] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.273049] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.291612] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 850.291905] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 850.304399] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782642d7-8789-46a4-a2c9-607cf8ef1db4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.311824] env[62619]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bff645c-d12a-4be9-b016-9db3841340d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.341670] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19fea474-c4e2-45da-a0b1-a8a4efa9e39f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.348205] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ed419e-900a-4b15-bbd6-678ac52a830c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.360495] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.863208] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 850.864460] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 850.864638] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.859910] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 905.709054] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 905.709467] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 905.709467] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 907.710235] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 908.710069] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 908.710069] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 908.710069] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 909.213153] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 909.213407] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 909.213580] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 909.709357] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.704296] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 911.209351] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 911.712376] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.715149] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.715149] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.715149] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 911.715149] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9385efa-bb2d-4c96-8c78-fc99e9001766 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.723645] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f3db6f-8c04-447b-ac94-cafdfa573809 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.737866] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234dfd9f-6c38-45c4-907d-78fcf114c45f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.745021] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f68e19d-fa7a-4b11-9914-0aa2f039b70d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.774521] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181383MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 911.774957] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.775333] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.808176] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 912.808487] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62619) _report_final_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 912.826316] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 912.840466] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 912.840633] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 912.852109] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 912.866774] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 912.877330] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ce587b-4dd0-478f-8548-fc4dc7bde022 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.884452] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097d7cf7-5d4a-45d2-899a-90b25487508e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.913511] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebd7092-94d0-4139-a90e-1e7d5dd0d9ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.920767] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737ce4cb-8f43-4971-933f-3d2580dcf3e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.934159] 
env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.437278] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 913.439183] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 913.439420] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.664s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.441044] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 965.709276] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 965.709672] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 967.711270] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 969.709751] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 969.710211] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 969.710211] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 970.213609] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 970.213842] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.709155] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.709412] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.709537] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.213017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.213442] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.213442] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.213564] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 971.214495] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3848405-9f7a-4dee-a04d-f5f34f77590a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.222117] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa32c5d-b594-4b31-af91-c1ff5e5753a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.236091] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df01774-20cf-4309-a4a7-17228c369eb4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.241952] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b44daa-7711-4e60-9fd8-4a87afa29845 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.271611] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181383MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 971.271763] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.271937] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.289530] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 972.289862] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 972.303130] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3773c7-409b-4955-b999-1c7aaf38583c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.312150] env[62619]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27a31a8-5f24-44fe-8e57-add74a99ef6f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.341911] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6d1e89-b520-4399-86a5-4af8af90aa67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.348741] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66f3e48-fe67-4eb2-b33d-ecbfffb1f96d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.361316] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.864780] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 972.866115] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 972.866305] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.866106] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.866556] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1027.709907] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1027.710324] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1027.710447] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1030.709328] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.709803] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.213227] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.213510] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.213758] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.213950] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1031.214860] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bdf4b4-e430-4c79-bbff-7546491c28a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.222599] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78d4bb7-5ef9-4ab5-b95b-0a6dc1ddefad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.236070] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e7a293-41a7-4499-994f-9473161343f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.241849] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dafc04c-47c2-41a6-a5c8-e63a4e41b32c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.269944] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181302MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1031.270099] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.270274] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.288751] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1032.288994] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1032.303457] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb10c8d0-7332-4cb3-900a-4a5e0b1ee660 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.310736] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8600824a-b274-4682-aa9a-6502b91f3771 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.339392] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e61a336-8c92-49fd-973a-79c80a2dc1aa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.346234] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2caa76-c4d1-4316-8495-42a356f40452 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.358531] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.862063] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1032.863278] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1032.863458] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.862398] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.862750] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1034.368063] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1034.368063] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1034.368063] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1034.869361] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. 
{{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 1034.871135] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1034.871135] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1034.871135] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1087.711104] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1087.711534] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1088.710314] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1091.710065] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.708947] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.709159] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.709322] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.709468] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.213060] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" 
{{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.213338] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.213500] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.213649] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1093.214543] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a23642d-c6c7-4d1b-9be0-2726a01a92d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.222522] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eeb4339-449e-4a7b-975f-32fb2f6ff982 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.237022] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa03ab26-d3f9-4e74-a964-975bb8a3bdc2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.241906] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35a18ef-db22-4bac-b40b-76f5859704b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.269702] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181371MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1093.269832] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.270022] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1094.290261] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62619) _report_final_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1094.290530] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1094.304164] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c089dec4-30d7-4b6d-9640-06530a34b87a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.311833] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357e8e60-cdc4-4fbe-8dc2-62a0bf4ac78f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.342118] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a33e95-ab8f-4505-8069-de7055b9f8dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.348957] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e4388f-cd08-41d3-90b0-daab3c7bbc4a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.361605] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.864844] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1094.866095] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1094.866278] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.596s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.866647] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.867073] env[62619]: DEBUG oslo_service.periodic_task [None 
req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.867073] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1095.867265] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1096.370242] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 1145.709649] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.212470] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.212857] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1150.709597] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1151.710441] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1151.710894] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Cleaning up deleted instances {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11609}} [ 1152.214014] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] There are 0 instances to clean {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11618}} [ 1152.214214] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1152.214358] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Cleaning up deleted instances with incomplete migration {{(pid=62619) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11647}} [ 1153.713997] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] 
Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.218203] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.218440] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.218579] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.721207] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.721558] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.721656] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1154.721801] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1154.722819] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90603fc8-cf15-43a9-9945-e996915cbdbd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.731351] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d068fdd-fd41-47a6-8b3e-3a6bc2eaff64 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.744530] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d53194-a5ac-4e65-8394-d2e0f9484ba0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.750396] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c433c28-1f64-4b15-a2e5-12215632654c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.778066] 
env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181386MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1154.778216] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.778382] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.797744] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1155.797744] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1155.810105] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a774630a-0835-4fcf-89d6-6a29f0b048fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.818420] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01c8cb9-dd18-494c-8d62-288a6b1b1b1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.847712] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eebf7421-c1ca-4be1-af2a-739ce2cff84c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.854460] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c659655-3643-4448-a017-1eb69efbf49e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.866854] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1156.370443] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1156.371673] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1156.371843] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.862546] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1156.862920] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1156.862920] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1157.366183] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 1157.366429] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.366593] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.208989] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1208.712296] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1208.712819] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1208.970459] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_power_states {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.474533] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Getting list of instances from cluster (obj){ [ 1209.474533] env[62619]: value = "domain-c8" [ 1209.474533] env[62619]: _type = "ClusterComputeResource" [ 1209.474533] env[62619]: } {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1209.475968] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42458828-5f18-4f35-a65f-bc426f1143a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.485147] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Got total of 0 instances {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1213.224414] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.709683] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.709924] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1214.709522] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1214.709962] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1214.709962] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1215.213124] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. 
{{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 1215.213389] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.213550] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.717201] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.717577] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.717661] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.717803] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1215.718642] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b7565e-490d-44bf-b9b6-ec8f78eda3ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.727168] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24561a4c-6eee-4384-b0a1-3398252eb450 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.741031] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55db0dd1-a244-4f03-a04a-083165f109eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.746973] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e467aca6-f2e6-42f9-9346-f601cc1c01e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.774993] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181369MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1215.775171] env[62619]: DEBUG 
oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.775324] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.894689] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1216.894969] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1216.910068] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1216.922490] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1216.922671] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1216.932514] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1216.947478] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: 
COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1216.959669] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8313964-2a39-4684-8972-6f7f4c357092 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.966711] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4928e358-0e1b-4618-9104-9318c8582b86 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.996632] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a9a1f9-ef27-4d85-bb3e-218e3ad9a9a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.003404] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c495fbb-baa7-41a0-8af4-9dd3c2451c63 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.015731] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1217.519273] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1217.520625] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1217.520808] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.745s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.017067] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1219.017681] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1268.711759] 
env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1268.712273] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1272.711414] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1273.710205] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.705112] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.209602] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.209796] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1275.209874] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1275.713372] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. 
{{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 1275.713771] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.713812] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.709021] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.709295] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1277.212701] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.213218] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.213218] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.213340] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1277.214615] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d94dd5-f57b-4912-96c1-7a2b3d438704 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.222866] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca441f2-7f5f-4967-b3b9-abc7d9477309 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.236397] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec51a4e-9e33-459a-95dc-ccc6631abe04 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.242510] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7860bf-944a-4801-b273-cb48b4f80c00 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.270698] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181261MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1277.270847] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.271032] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.288857] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1278.289119] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1278.303255] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b07f6cb-744d-4807-a87a-57f38ba93604 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.310841] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a151e880-85a3-4a0a-9323-abaf657e612b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.341050] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6917011-d330-4824-9001-344fe117034c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.348299] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1b593f-dbc3-46ad-b929-ca645e4e8c0d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.361533] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.864385] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1278.865598] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1278.865778] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.595s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.861834] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.002190] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "08c1fef9-40fc-4420-91de-fe911dea70f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.002190] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "08c1fef9-40fc-4420-91de-fe911dea70f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.504995] env[62619]: DEBUG nova.compute.manager [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1329.711710] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.711828] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1330.060022] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.060022] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.063584] env[62619]: INFO nova.compute.claims [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1331.126499] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b506a618-3710-4225-aba7-cbc38a243493 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.137125] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47560fe4-6c4e-49b9-8b40-644a42b955bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.169412] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a355bc-d89c-49fa-abc4-2530a56415c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.176842] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f2298c-1252-42b5-afb3-4b5e370c3802 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.195270] env[62619]: DEBUG nova.compute.provider_tree [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.574186] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquiring lock "aa576459-65bf-4b16-ad1d-0930497522eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.574424] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Lock "aa576459-65bf-4b16-ad1d-0930497522eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.699598] env[62619]: DEBUG nova.scheduler.client.report [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1332.077887] env[62619]: DEBUG nova.compute.manager [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1332.206913] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.148s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.208066] env[62619]: DEBUG nova.compute.manager [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1332.424035] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Acquiring lock "b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1332.424035] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Lock "b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.612722] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1332.613013] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.616095] env[62619]: INFO nova.compute.claims [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1332.709497] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1332.717718] env[62619]: DEBUG nova.compute.utils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1332.720080] env[62619]: DEBUG nova.compute.manager [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1332.720080] env[62619]: DEBUG nova.network.neutron [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1332.822478] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Acquiring lock "4f08d36b-f26e-499e-a4be-d8cbb481a44d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1332.822746] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Lock "4f08d36b-f26e-499e-a4be-d8cbb481a44d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.928110] env[62619]: DEBUG nova.compute.manager [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1332.941312] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Acquiring lock "e98120b4-7916-4ce4-88ef-0c904852bb1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1332.941866] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Lock "e98120b4-7916-4ce4-88ef-0c904852bb1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.100909] env[62619]: DEBUG nova.policy [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0cde3ba9ee004055bb5e09bc932dc4f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0948c27a2b08413ba82d553452965c9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1333.230238] env[62619]: DEBUG nova.compute.manager [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 
tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1333.326562] env[62619]: DEBUG nova.compute.manager [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1333.447451] env[62619]: DEBUG nova.compute.manager [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1333.468185] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.725595] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21db495-e092-4dec-93da-2c03a885d482 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.733397] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af172f94-bbf6-4c86-b97c-5bfb4a79fa04 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.780296] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3cd879-cda6-4d66-96a4-2706bc8625d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.787631] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb906bd-2142-4ba6-b2cb-60eb3ec2d2ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.809667] env[62619]: DEBUG nova.compute.provider_tree [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1333.854177] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.973687] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
{{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.003132] env[62619]: DEBUG nova.network.neutron [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Successfully created port: a5a1b25b-7dc6-4b53-90e5-f43a90048197 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1334.245112] env[62619]: DEBUG nova.compute.manager [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1334.274455] env[62619]: DEBUG nova.virt.hardware [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1334.274569] env[62619]: DEBUG nova.virt.hardware [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1334.274721] env[62619]: DEBUG nova.virt.hardware [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1334.274932] env[62619]: DEBUG nova.virt.hardware [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1334.275115] env[62619]: DEBUG nova.virt.hardware [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1334.275291] env[62619]: DEBUG nova.virt.hardware [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1334.275532] env[62619]: DEBUG nova.virt.hardware [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 
tempest-ImagesTestJSON-37875623-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1334.275777] env[62619]: DEBUG nova.virt.hardware [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1334.275929] env[62619]: DEBUG nova.virt.hardware [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1334.276332] env[62619]: DEBUG nova.virt.hardware [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1334.276743] env[62619]: DEBUG nova.virt.hardware [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1334.277915] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a055d8-8e83-45c8-8b86-cf7414aa3530 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.288551] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24298b7-d6e4-4bfd-98fd-7534e6c6226c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.309836] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6451ea-bb23-4add-879b-81b744affb06 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.322291] env[62619]: DEBUG nova.scheduler.client.report [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1334.710014] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1334.829048] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 
tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.216s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.829574] env[62619]: DEBUG nova.compute.manager [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1334.838436] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.370s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.842869] env[62619]: INFO nova.compute.claims [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1335.348080] env[62619]: DEBUG nova.compute.utils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1335.351027] env[62619]: DEBUG nova.compute.manager [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Allocating IP information in the background. 
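The "Inventory has not changed for provider ... based on inventory data: {...}" entries above amount to comparing the freshly reported inventory against the cached copy before deciding whether placement needs an update. A minimal stand-in sketch (hypothetical helper name; the data is copied from the log):

    # Illustrative sketch only -- a plain value comparison; any differing
    # total/reserved/allocation_ratio field counts as a change.
    import copy

    def inventory_changed(cached, reported):
        return cached != reported

    cached = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }
    reported = copy.deepcopy(cached)
    if not inventory_changed(cached, reported):
        print('Inventory has not changed for provider '
              'e814b747-ed75-487b-a97d-acf66bc6db0b')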
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1335.351027] env[62619]: DEBUG nova.network.neutron [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1335.446666] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "ac03bcf3-61df-4557-8018-0ad54ef30f17" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.447071] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "ac03bcf3-61df-4557-8018-0ad54ef30f17" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.507399] env[62619]: DEBUG nova.policy [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b7492621ad8f42fab35b55b3615d38e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82e5715bea444a7d8da5c897258df611', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1335.709298] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1335.854147] env[62619]: DEBUG nova.compute.manager [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1335.951242] env[62619]: DEBUG nova.compute.manager [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1336.017922] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bcd799d-e7f4-437f-a047-23e8269b9f3f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.029730] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-305feb27-70d8-4818-bf1e-d2fd6d418f68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.071167] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734e9fc7-88b5-4909-8ca7-047ac92a96e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.081783] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb6d6d6-2a6a-4d10-978a-674326f70431 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.096479] env[62619]: DEBUG nova.compute.provider_tree [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1336.424880] env[62619]: DEBUG nova.network.neutron [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Successfully created port: 50d706b8-8aee-4647-b813-73db43cd22df {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1336.480701] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.599532] env[62619]: DEBUG nova.scheduler.client.report [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1336.710253] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1336.710253] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] 
Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1336.710253] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1336.862792] env[62619]: DEBUG nova.compute.manager [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1336.884648] env[62619]: DEBUG nova.network.neutron [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Successfully updated port: a5a1b25b-7dc6-4b53-90e5-f43a90048197 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1336.897386] env[62619]: DEBUG nova.virt.hardware [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1336.900571] env[62619]: DEBUG nova.virt.hardware [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1336.900571] env[62619]: DEBUG nova.virt.hardware [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1336.900571] env[62619]: DEBUG nova.virt.hardware [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1336.900571] env[62619]: DEBUG nova.virt.hardware [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1336.900571] env[62619]: DEBUG nova.virt.hardware [None 
req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1336.900852] env[62619]: DEBUG nova.virt.hardware [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1336.900852] env[62619]: DEBUG nova.virt.hardware [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1336.900852] env[62619]: DEBUG nova.virt.hardware [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1336.900852] env[62619]: DEBUG nova.virt.hardware [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1336.900852] env[62619]: DEBUG nova.virt.hardware [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1336.901289] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df58b3b7-33f6-48ae-b315-7241923ebe8d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.914571] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53cf7fd-0f78-4fa1-ad1d-99c7aa9c488c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.111031] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.270s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.111031] env[62619]: DEBUG nova.compute.manager [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Start building networks asynchronously for instance. 
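The "Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" entries above enumerate every (sockets, cores, threads) factorisation of the vCPU count that fits the 65536/65536/65536 limits. A hedged sketch of that enumeration (not nova's implementation):

    # Illustrative sketch only: list every topology whose product equals
    # the vCPU count and whose dimensions stay within the given limits.
    from itertools import product
    from typing import NamedTuple

    class Topology(NamedTuple):
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        found = []
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                found.append(Topology(s, c, t))
        return found

    print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)]
    print(possible_topologies(4))   # 4 = 1x1x4, 1x2x2, 1x4x1, 2x1x2, ...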
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1337.113068] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.259s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.115695] env[62619]: INFO nova.compute.claims [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1337.214808] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10322}} [ 1337.214808] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10322}} [ 1337.214808] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10322}} [ 1337.214808] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. 
{{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 1337.214984] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.215271] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.296192] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "4763e489-5aeb-4dc0-b327-b79a55afdfe3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.296433] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "4763e489-5aeb-4dc0-b327-b79a55afdfe3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.387221] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "refresh_cache-08c1fef9-40fc-4420-91de-fe911dea70f7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1337.387305] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired lock "refresh_cache-08c1fef9-40fc-4420-91de-fe911dea70f7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.387448] env[62619]: DEBUG nova.network.neutron [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1337.620312] env[62619]: DEBUG nova.compute.utils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1337.626152] env[62619]: DEBUG nova.compute.manager [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1337.626344] env[62619]: DEBUG nova.network.neutron [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1337.675914] env[62619]: DEBUG nova.policy [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5498230ff22d4287881583bce7088b56', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e606dfc7f0742f48e37edbf8b5ca3bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1337.718916] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.799162] env[62619]: DEBUG nova.compute.manager [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1337.944372] env[62619]: DEBUG nova.network.neutron [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1338.111369] env[62619]: DEBUG nova.network.neutron [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Successfully created port: e9bd11b1-25ab-47d5-a138-85ec70cd7a3b {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1338.128488] env[62619]: DEBUG nova.compute.manager [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1338.348022] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.415608] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3a4d6e-27fa-4e25-843d-15dfec5e5941 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.427148] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f82235-94d4-4789-9037-25b4233d2b98 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.459588] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5cb30c-f5bc-4544-b734-ecb137ff26e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.467890] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577a2bcf-2345-4de5-b095-4bd933208267 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.484287] env[62619]: DEBUG nova.compute.provider_tree [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1338.584641] env[62619]: DEBUG nova.network.neutron [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Updating instance_info_cache with network_info: [{"id": "a5a1b25b-7dc6-4b53-90e5-f43a90048197", "address": "fa:16:3e:fa:00:6e", "network": {"id": "7bc2fa7e-84fd-4916-9af1-aa767e1e38b8", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1746046422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0948c27a2b08413ba82d553452965c9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5a1b25b-7d", "ovs_interfaceid": "a5a1b25b-7dc6-4b53-90e5-f43a90048197", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1338.988427] env[62619]: DEBUG 
nova.scheduler.client.report [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1339.087500] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lock "refresh_cache-08c1fef9-40fc-4420-91de-fe911dea70f7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1339.087893] env[62619]: DEBUG nova.compute.manager [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Instance network_info: |[{"id": "a5a1b25b-7dc6-4b53-90e5-f43a90048197", "address": "fa:16:3e:fa:00:6e", "network": {"id": "7bc2fa7e-84fd-4916-9af1-aa767e1e38b8", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1746046422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0948c27a2b08413ba82d553452965c9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5a1b25b-7d", "ovs_interfaceid": "a5a1b25b-7dc6-4b53-90e5-f43a90048197", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1339.088423] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:00:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a27fd90b-16a5-43af-bede-ae36762ece00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a5a1b25b-7dc6-4b53-90e5-f43a90048197', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1339.103824] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating folder: OpenStack. Parent ref: group-v4. 
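The "Instance VIF info" entry above is derived from one element of the neutron network_info list logged just before it. A hypothetical mapping that reproduces the same shape from the logged data (not the vmwareapi driver's actual code; 'vmxnet3' is taken from the log rather than computed from image properties):

    # Illustrative sketch only: build the VIF info dict shown in the log
    # from one neutron network_info entry. Helper name is hypothetical.
    def vif_info_from_network_info(vif):
        details = vif['details']
        return {
            'network_name': vif['network']['bridge'],           # 'br-int'
            'mac_address': vif['address'],                      # 'fa:16:3e:fa:00:6e'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],                              # neutron port UUID
            'vif_model': 'vmxnet3',                             # as logged for this image
        }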
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1339.105343] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1607f880-d6a7-49e2-8d92-06b80ceeded9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.110772] env[62619]: DEBUG nova.compute.manager [req-a0f15afa-541b-41a0-9485-63467c2184fb req-bf2d7167-f850-49e3-a5b0-2170abe2b198 service nova] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Received event network-vif-plugged-a5a1b25b-7dc6-4b53-90e5-f43a90048197 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1339.110772] env[62619]: DEBUG oslo_concurrency.lockutils [req-a0f15afa-541b-41a0-9485-63467c2184fb req-bf2d7167-f850-49e3-a5b0-2170abe2b198 service nova] Acquiring lock "08c1fef9-40fc-4420-91de-fe911dea70f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.110772] env[62619]: DEBUG oslo_concurrency.lockutils [req-a0f15afa-541b-41a0-9485-63467c2184fb req-bf2d7167-f850-49e3-a5b0-2170abe2b198 service nova] Lock "08c1fef9-40fc-4420-91de-fe911dea70f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.110772] env[62619]: DEBUG oslo_concurrency.lockutils [req-a0f15afa-541b-41a0-9485-63467c2184fb req-bf2d7167-f850-49e3-a5b0-2170abe2b198 service nova] Lock "08c1fef9-40fc-4420-91de-fe911dea70f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.110772] env[62619]: DEBUG nova.compute.manager [req-a0f15afa-541b-41a0-9485-63467c2184fb req-bf2d7167-f850-49e3-a5b0-2170abe2b198 service nova] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] No waiting events found dispatching network-vif-plugged-a5a1b25b-7dc6-4b53-90e5-f43a90048197 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1339.111165] env[62619]: WARNING nova.compute.manager [req-a0f15afa-541b-41a0-9485-63467c2184fb req-bf2d7167-f850-49e3-a5b0-2170abe2b198 service nova] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Received unexpected event network-vif-plugged-a5a1b25b-7dc6-4b53-90e5-f43a90048197 for instance with vm_state building and task_state spawning. [ 1339.127724] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Created folder: OpenStack in parent group-v4. [ 1339.128126] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating folder: Project (0948c27a2b08413ba82d553452965c9b). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1339.128224] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5d32417-598f-4a77-91c1-31a6aaa97c0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.138479] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Created folder: Project (0948c27a2b08413ba82d553452965c9b) in parent group-v368875. [ 1339.138479] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating folder: Instances. Parent ref: group-v368876. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1339.138479] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5327e9e9-eea9-4e12-b0c6-65df11f6d7db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.144505] env[62619]: DEBUG nova.compute.manager [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1339.149816] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Created folder: Instances in parent group-v368876. [ 1339.149816] env[62619]: DEBUG oslo.service.loopingcall [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1339.149816] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1339.149914] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d142d34c-cb24-44f7-b034-66ade368aecb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.174000] env[62619]: DEBUG nova.virt.hardware [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1339.174100] env[62619]: DEBUG nova.virt.hardware [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1339.174532] env[62619]: DEBUG nova.virt.hardware [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1339.174532] env[62619]: DEBUG nova.virt.hardware [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1339.174532] env[62619]: DEBUG nova.virt.hardware [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1339.174701] env[62619]: DEBUG nova.virt.hardware [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1339.174865] env[62619]: DEBUG nova.virt.hardware [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1339.175031] env[62619]: DEBUG nova.virt.hardware [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1339.175208] env[62619]: DEBUG nova.virt.hardware [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1339.175347] env[62619]: DEBUG nova.virt.hardware [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1339.175508] env[62619]: DEBUG nova.virt.hardware [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1339.176554] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed19453d-2eee-49d5-9d87-ea49c5e47cd2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.183283] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1339.183283] env[62619]: value = "task-1777064" [ 1339.183283] env[62619]: _type = "Task" [ 1339.183283] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.195620] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777064, 'name': CreateVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.198780] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8925111-2c94-42b0-aba7-8de871ae37b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.317495] env[62619]: DEBUG nova.network.neutron [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Successfully updated port: 50d706b8-8aee-4647-b813-73db43cd22df {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1339.497405] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.384s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.498045] env[62619]: DEBUG nova.compute.manager [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1339.503983] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.531s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.505644] env[62619]: INFO nova.compute.claims [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1339.563152] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Acquiring lock "28a8485c-fc0d-4fd0-8be9-37c49caf89b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.563436] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Lock "28a8485c-fc0d-4fd0-8be9-37c49caf89b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.697528] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777064, 'name': CreateVM_Task, 'duration_secs': 0.38357} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.697757] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1339.714946] env[62619]: DEBUG oslo_vmware.service [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89415778-1f5c-4929-9051-20c542e3caa4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.727204] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.727204] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.727650] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1339.729147] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Acquiring lock "e0e25ddd-3692-480f-bfa0-212741c0d882" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.729884] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Lock "e0e25ddd-3692-480f-bfa0-212741c0d882" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.730929] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfd58df6-bda0-4a27-9f43-5b44955965e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.737283] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1339.737283] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5278e1fe-fc50-66e3-0bb8-5e9d522a9d8d" [ 1339.737283] env[62619]: _type = "Task" [ 1339.737283] env[62619]: } to complete. 
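The "Waiting for the task ... progress is 10% ... completed successfully" entries above reflect a poll-until-terminal-state loop around each vCenter task (CreateVM_Task, SearchDatastore_Task). A generic, standard-library sketch of that shape (hypothetical names, not oslo.vmware's API):

    # Illustrative sketch only: periodically re-read task state until it
    # reaches a terminal state or a deadline expires.
    import logging
    import time

    LOG = logging.getLogger(__name__)

    def wait_for_task(fetch_state, interval=0.5, timeout=300.0):
        """fetch_state() -> dict like {'state': 'running', 'progress': 10}."""
        deadline = time.monotonic() + timeout
        while True:
            info = fetch_state()
            if info['state'] == 'success':
                LOG.debug('Task completed successfully: %s', info)
                return info
            if info['state'] == 'error':
                raise RuntimeError(f'task failed: {info}')
            if time.monotonic() > deadline:
                raise TimeoutError('task did not finish in time')
            LOG.debug('Task progress is %s%%.', info.get('progress', 0))
            time.sleep(interval)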
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.745993] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5278e1fe-fc50-66e3-0bb8-5e9d522a9d8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.820458] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquiring lock "refresh_cache-aa576459-65bf-4b16-ad1d-0930497522eb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.820611] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquired lock "refresh_cache-aa576459-65bf-4b16-ad1d-0930497522eb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.820771] env[62619]: DEBUG nova.network.neutron [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1340.015151] env[62619]: DEBUG nova.compute.utils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1340.015958] env[62619]: DEBUG nova.compute.manager [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1340.015958] env[62619]: DEBUG nova.network.neutron [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1340.070256] env[62619]: DEBUG nova.compute.manager [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1340.077767] env[62619]: DEBUG nova.policy [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df31406089874239be6496bc324a77a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4797964dae0547608a979967b802c24f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1340.234650] env[62619]: DEBUG nova.compute.manager [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1340.252388] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1340.252388] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1340.252388] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1340.252388] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1340.252388] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1340.252722] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2cb99cd-1486-4ef4-a692-afd497b35b3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.273483] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 
tempest-ImagesTestJSON-37875623-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1340.273690] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1340.275042] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13fae231-34d8-41ed-9f17-5eb5a1d021f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.286620] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4370b84-f5b0-4701-ac35-0f9844bda941 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.290994] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1340.290994] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a00f34-9b8e-830e-be93-ed7a37a3436c" [ 1340.290994] env[62619]: _type = "Task" [ 1340.290994] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.299569] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a00f34-9b8e-830e-be93-ed7a37a3436c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.351759] env[62619]: DEBUG nova.network.neutron [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Successfully updated port: e9bd11b1-25ab-47d5-a138-85ec70cd7a3b {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1340.430440] env[62619]: DEBUG nova.network.neutron [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1340.519267] env[62619]: DEBUG nova.compute.manager [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1340.557397] env[62619]: DEBUG nova.compute.manager [req-695c9e51-0bc1-4d69-bfd7-e5f5001273f6 req-99375e1a-65c9-4fb0-bd15-1749c3434016 service nova] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Received event network-vif-plugged-50d706b8-8aee-4647-b813-73db43cd22df {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1340.557397] env[62619]: DEBUG oslo_concurrency.lockutils [req-695c9e51-0bc1-4d69-bfd7-e5f5001273f6 req-99375e1a-65c9-4fb0-bd15-1749c3434016 service nova] Acquiring lock "aa576459-65bf-4b16-ad1d-0930497522eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.557397] env[62619]: DEBUG oslo_concurrency.lockutils [req-695c9e51-0bc1-4d69-bfd7-e5f5001273f6 req-99375e1a-65c9-4fb0-bd15-1749c3434016 service nova] Lock "aa576459-65bf-4b16-ad1d-0930497522eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.557397] env[62619]: DEBUG oslo_concurrency.lockutils [req-695c9e51-0bc1-4d69-bfd7-e5f5001273f6 req-99375e1a-65c9-4fb0-bd15-1749c3434016 service nova] Lock "aa576459-65bf-4b16-ad1d-0930497522eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.557397] env[62619]: DEBUG nova.compute.manager [req-695c9e51-0bc1-4d69-bfd7-e5f5001273f6 req-99375e1a-65c9-4fb0-bd15-1749c3434016 service nova] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] No waiting events found dispatching network-vif-plugged-50d706b8-8aee-4647-b813-73db43cd22df {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1340.560486] env[62619]: WARNING nova.compute.manager [req-695c9e51-0bc1-4d69-bfd7-e5f5001273f6 req-99375e1a-65c9-4fb0-bd15-1749c3434016 service nova] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Received unexpected event network-vif-plugged-50d706b8-8aee-4647-b813-73db43cd22df for instance with vm_state building and task_state spawning. 
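The 'Acquiring lock ... by ...' / 'acquired ... waited 0.000s' / '"released" ... held 2.384s' DEBUG lines above, and the 'Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>"' lines, are emitted by oslo.concurrency's lockutils helpers (lockutils.py:402/407/421 and 310/313/331 in the location tags). A minimal sketch of those two locking patterns, standing outside Nova and using illustrative function names with instance UUIDs borrowed from the log, might look like this:

```python
# Sketch only: the oslo.concurrency locking patterns visible in the DEBUG lines
# above. Function names are illustrative, not Nova internals.
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Decorator form: serializes resource-tracker-style updates and logs the
    # "acquired by ... waited N s" / "released ... held N s" pairs.
    print("claiming resources for %s" % instance_uuid)


def refresh_network_cache(instance_uuid):
    # Context-manager form: matches the 'Acquiring/Acquired/Releasing lock
    # "refresh_cache-<uuid>"' lines logged while the network info cache is rebuilt.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        print("rebuilding network info cache for %s" % instance_uuid)


if __name__ == "__main__":
    claim_resources("4f08d36b-f26e-499e-a4be-d8cbb481a44d")
    refresh_network_cache("aa576459-65bf-4b16-ad1d-0930497522eb")
```

Both helpers default to an in-process semaphore; passing external=True layers an inter-process file lock on top, which appears to be what the 'Acquired external semaphore "[datastore1] devstack-image-cache_base/..."' lines for the image cache correspond to.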
[ 1340.572890] env[62619]: DEBUG nova.network.neutron [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Successfully created port: 1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1340.600787] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.740976] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affe6da8-ff9f-4326-96b6-4034c2b7ebe5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.752847] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0899079-0e28-494a-b80f-8f5646d3017e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.783558] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddebf762-a371-416d-ac6f-13217abd355d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.791127] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01cb784-0554-429c-bc7e-48e59eee6cc7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.818640] env[62619]: DEBUG nova.compute.provider_tree [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1340.828355] env[62619]: DEBUG nova.network.neutron [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Updating instance_info_cache with network_info: [{"id": "50d706b8-8aee-4647-b813-73db43cd22df", "address": "fa:16:3e:32:35:47", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.62", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50d706b8-8a", "ovs_interfaceid": 
"50d706b8-8aee-4647-b813-73db43cd22df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.830068] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Preparing fetch location {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1340.830291] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating directory with path [datastore1] vmware_temp/8698782c-6c54-4209-8869-4e6042bea038/27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1340.830694] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72337b02-56e1-47b5-8559-6d9fa68af91e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.854811] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Acquiring lock "refresh_cache-b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1340.855016] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Acquired lock "refresh_cache-b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1340.855526] env[62619]: DEBUG nova.network.neutron [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1340.861711] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Created directory with path [datastore1] vmware_temp/8698782c-6c54-4209-8869-4e6042bea038/27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1340.861711] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Fetch image to [datastore1] vmware_temp/8698782c-6c54-4209-8869-4e6042bea038/27a858d5-7985-4b17-8b01-50adcd8f566c/tmp-sparse.vmdk {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1340.861711] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Downloading image file data 
27a858d5-7985-4b17-8b01-50adcd8f566c to [datastore1] vmware_temp/8698782c-6c54-4209-8869-4e6042bea038/27a858d5-7985-4b17-8b01-50adcd8f566c/tmp-sparse.vmdk on the data store datastore1 {{(pid=62619) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1340.862507] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dc31f8-f683-4fff-b883-57100c8c2518 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.866415] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.874437] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f6e3c6-5bcd-4ec8-8bf9-c381097e2439 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.885933] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8a6eba-a3e1-4acc-91fe-444723525257 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.918700] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53bd82c5-3f78-4991-bbe9-8a28d2fa1fb1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.924988] env[62619]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2204da18-4aa6-4388-8ffb-ded01983ef19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.017009] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Downloading image file data 27a858d5-7985-4b17-8b01-50adcd8f566c to the data store datastore1 {{(pid=62619) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1341.108194] env[62619]: DEBUG oslo_vmware.rw_handles [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8698782c-6c54-4209-8869-4e6042bea038/27a858d5-7985-4b17-8b01-50adcd8f566c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62619) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1341.332574] env[62619]: DEBUG nova.scheduler.client.report [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1341.344469] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Releasing lock "refresh_cache-aa576459-65bf-4b16-ad1d-0930497522eb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1341.344747] env[62619]: DEBUG nova.compute.manager [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Instance network_info: |[{"id": "50d706b8-8aee-4647-b813-73db43cd22df", "address": "fa:16:3e:32:35:47", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.62", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50d706b8-8a", "ovs_interfaceid": "50d706b8-8aee-4647-b813-73db43cd22df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1341.348714] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:35:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50d706b8-8aee-4647-b813-73db43cd22df', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1341.357896] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-39febed6-149d-4385-9524-8ebab654e006 
tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Creating folder: Project (82e5715bea444a7d8da5c897258df611). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1341.363123] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-120b5864-fed9-470e-a772-0557a97f022c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.376359] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Created folder: Project (82e5715bea444a7d8da5c897258df611) in parent group-v368875. [ 1341.376359] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Creating folder: Instances. Parent ref: group-v368879. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1341.376774] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97a92e60-978c-4167-9965-1285d7cc7ed0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.389540] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Created folder: Instances in parent group-v368879. [ 1341.390148] env[62619]: DEBUG oslo.service.loopingcall [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1341.391949] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1341.392308] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13455557-fe35-46fe-a936-b476c079e21e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.416368] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1341.416368] env[62619]: value = "task-1777067" [ 1341.416368] env[62619]: _type = "Task" [ 1341.416368] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.419015] env[62619]: DEBUG nova.network.neutron [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1341.429577] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777067, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.544585] env[62619]: DEBUG nova.compute.manager [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1341.587930] env[62619]: DEBUG nova.virt.hardware [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1341.587930] env[62619]: DEBUG nova.virt.hardware [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1341.587930] env[62619]: DEBUG nova.virt.hardware [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1341.588505] env[62619]: DEBUG nova.virt.hardware [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1341.588505] env[62619]: DEBUG nova.virt.hardware [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1341.588505] env[62619]: DEBUG nova.virt.hardware [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1341.588505] env[62619]: DEBUG nova.virt.hardware [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1341.588505] env[62619]: DEBUG nova.virt.hardware [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1341.588704] env[62619]: DEBUG nova.virt.hardware [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1341.588704] env[62619]: DEBUG nova.virt.hardware [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1341.588704] env[62619]: DEBUG nova.virt.hardware [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1341.588704] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83055602-59fd-447d-aa86-2d92e6eca374 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.604837] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413db619-1836-4f83-b30e-3419179d0975 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.721910] env[62619]: DEBUG nova.network.neutron [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Updating instance_info_cache with network_info: [{"id": "e9bd11b1-25ab-47d5-a138-85ec70cd7a3b", "address": "fa:16:3e:30:22:6a", "network": {"id": "b07fc3fc-86b1-4689-bc3e-430ec15c2d40", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1955054791-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e606dfc7f0742f48e37edbf8b5ca3bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9bd11b1-25", "ovs_interfaceid": "e9bd11b1-25ab-47d5-a138-85ec70cd7a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.804414] env[62619]: DEBUG 
oslo_vmware.rw_handles [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Completed reading data from the image iterator. {{(pid=62619) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1341.804619] env[62619]: DEBUG oslo_vmware.rw_handles [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8698782c-6c54-4209-8869-4e6042bea038/27a858d5-7985-4b17-8b01-50adcd8f566c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1341.846880] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.343s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.847419] env[62619]: DEBUG nova.compute.manager [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1341.852363] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.372s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.853812] env[62619]: INFO nova.compute.claims [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1341.927889] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777067, 'name': CreateVM_Task, 'duration_secs': 0.377703} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.928072] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1341.928749] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1341.928935] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.929376] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1341.929672] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e09b23c-1aee-465c-8580-47efd614e450 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.934546] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1341.934546] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c8969b-83c1-64c3-eab7-a470d9bc9e45" [ 1341.934546] env[62619]: _type = "Task" [ 1341.934546] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.944098] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c8969b-83c1-64c3-eab7-a470d9bc9e45, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.962023] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Downloaded image file data 27a858d5-7985-4b17-8b01-50adcd8f566c to vmware_temp/8698782c-6c54-4209-8869-4e6042bea038/27a858d5-7985-4b17-8b01-50adcd8f566c/tmp-sparse.vmdk on the data store datastore1 {{(pid=62619) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1341.966251] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Caching image {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1341.966251] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Copying Virtual Disk [datastore1] vmware_temp/8698782c-6c54-4209-8869-4e6042bea038/27a858d5-7985-4b17-8b01-50adcd8f566c/tmp-sparse.vmdk to [datastore1] vmware_temp/8698782c-6c54-4209-8869-4e6042bea038/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1341.966251] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d479b28c-26ea-4681-b7d0-abbf5c6c68c4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.974240] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1341.974240] env[62619]: value = "task-1777068" [ 1341.974240] env[62619]: _type = "Task" [ 1341.974240] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.983784] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777068, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.007021] env[62619]: DEBUG nova.compute.manager [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Received event network-changed-a5a1b25b-7dc6-4b53-90e5-f43a90048197 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1342.007281] env[62619]: DEBUG nova.compute.manager [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Refreshing instance network info cache due to event network-changed-a5a1b25b-7dc6-4b53-90e5-f43a90048197. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1342.007505] env[62619]: DEBUG oslo_concurrency.lockutils [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] Acquiring lock "refresh_cache-08c1fef9-40fc-4420-91de-fe911dea70f7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1342.007647] env[62619]: DEBUG oslo_concurrency.lockutils [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] Acquired lock "refresh_cache-08c1fef9-40fc-4420-91de-fe911dea70f7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.007826] env[62619]: DEBUG nova.network.neutron [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Refreshing network info cache for port a5a1b25b-7dc6-4b53-90e5-f43a90048197 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1342.227623] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Releasing lock "refresh_cache-b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1342.227976] env[62619]: DEBUG nova.compute.manager [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Instance network_info: |[{"id": "e9bd11b1-25ab-47d5-a138-85ec70cd7a3b", "address": "fa:16:3e:30:22:6a", "network": {"id": "b07fc3fc-86b1-4689-bc3e-430ec15c2d40", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1955054791-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e606dfc7f0742f48e37edbf8b5ca3bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9bd11b1-25", "ovs_interfaceid": "e9bd11b1-25ab-47d5-a138-85ec70cd7a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1342.228454] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:22:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c47e98ff-83cf-48d2-bf91-2931c7386b6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'e9bd11b1-25ab-47d5-a138-85ec70cd7a3b', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1342.236638] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Creating folder: Project (4e606dfc7f0742f48e37edbf8b5ca3bb). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1342.236970] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-179f774b-ddae-486f-b1b6-1f0f620d4293 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.249342] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Created folder: Project (4e606dfc7f0742f48e37edbf8b5ca3bb) in parent group-v368875. [ 1342.250378] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Creating folder: Instances. Parent ref: group-v368882. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1342.250378] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b5f9f97c-f732-42b2-9d71-c68f6d3c2dfa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.262129] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Created folder: Instances in parent group-v368882. [ 1342.262559] env[62619]: DEBUG oslo.service.loopingcall [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1342.262806] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1342.263035] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a9f57cd-8522-438b-9652-eead34fbe58f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.293612] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1342.293612] env[62619]: value = "task-1777071" [ 1342.293612] env[62619]: _type = "Task" [ 1342.293612] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.302402] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777071, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.364777] env[62619]: DEBUG nova.compute.utils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1342.366524] env[62619]: DEBUG nova.compute.manager [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1342.366755] env[62619]: DEBUG nova.network.neutron [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1342.454223] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1342.454500] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1342.454808] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1342.487117] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777068, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.687437] env[62619]: DEBUG nova.policy [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98e869dd0566492cb0da86db40ffe78c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2066ce6a2aeb4a0eb4bee782cfcdfa42', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1342.806906] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777071, 'name': CreateVM_Task, 'duration_secs': 0.458662} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.806906] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1342.807060] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1342.808453] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.808453] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1342.808453] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01df7b7c-856a-43ac-9551-92cfd818000a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.813789] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Waiting for the task: (returnval){ [ 1342.813789] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525617cf-b6a2-f50c-4f0c-865363d8f372" [ 1342.813789] env[62619]: _type = "Task" [ 1342.813789] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.823972] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525617cf-b6a2-f50c-4f0c-865363d8f372, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.873514] env[62619]: DEBUG nova.compute.manager [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1342.994503] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777068, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.790634} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.994503] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Copied Virtual Disk [datastore1] vmware_temp/8698782c-6c54-4209-8869-4e6042bea038/27a858d5-7985-4b17-8b01-50adcd8f566c/tmp-sparse.vmdk to [datastore1] vmware_temp/8698782c-6c54-4209-8869-4e6042bea038/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1342.994503] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleting the datastore file [datastore1] vmware_temp/8698782c-6c54-4209-8869-4e6042bea038/27a858d5-7985-4b17-8b01-50adcd8f566c/tmp-sparse.vmdk {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1342.994724] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be06b5d3-672e-49c3-8b3e-161d4adc87f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.004797] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1343.004797] env[62619]: value = "task-1777072" [ 1343.004797] env[62619]: _type = "Task" [ 1343.004797] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.012744] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777072, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.052338] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13746cbd-5dea-4bf9-824e-d60e51cb8689 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.060110] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f64ce1-182e-4ed7-913e-c51451dd6e87 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.092989] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e26c8a-3293-4172-b30a-b856fefb7a03 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.101136] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028ee00b-2303-4f8a-a046-fd713e922c09 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.122988] env[62619]: DEBUG nova.compute.provider_tree [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1343.326641] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.326899] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1343.327745] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.354701] env[62619]: DEBUG nova.network.neutron [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Updated VIF entry in instance network info cache for port a5a1b25b-7dc6-4b53-90e5-f43a90048197. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1343.355259] env[62619]: DEBUG nova.network.neutron [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Updating instance_info_cache with network_info: [{"id": "a5a1b25b-7dc6-4b53-90e5-f43a90048197", "address": "fa:16:3e:fa:00:6e", "network": {"id": "7bc2fa7e-84fd-4916-9af1-aa767e1e38b8", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1746046422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0948c27a2b08413ba82d553452965c9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5a1b25b-7d", "ovs_interfaceid": "a5a1b25b-7dc6-4b53-90e5-f43a90048197", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.513901] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777072, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025227} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.514354] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1343.514354] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Moving file from [datastore1] vmware_temp/8698782c-6c54-4209-8869-4e6042bea038/27a858d5-7985-4b17-8b01-50adcd8f566c to [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c. {{(pid=62619) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1343.515084] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-04459b3d-bfa3-4c47-a36b-108e91b07ec4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.521698] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1343.521698] env[62619]: value = "task-1777073" [ 1343.521698] env[62619]: _type = "Task" [ 1343.521698] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.534754] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777073, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.617404] env[62619]: DEBUG nova.compute.manager [req-cba6a28c-2f7e-487d-8cf5-1a6d525b8fd5 req-93af0371-646b-4001-b0b3-2290958ba0be service nova] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Received event network-changed-50d706b8-8aee-4647-b813-73db43cd22df {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1343.617617] env[62619]: DEBUG nova.compute.manager [req-cba6a28c-2f7e-487d-8cf5-1a6d525b8fd5 req-93af0371-646b-4001-b0b3-2290958ba0be service nova] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Refreshing instance network info cache due to event network-changed-50d706b8-8aee-4647-b813-73db43cd22df. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1343.617945] env[62619]: DEBUG oslo_concurrency.lockutils [req-cba6a28c-2f7e-487d-8cf5-1a6d525b8fd5 req-93af0371-646b-4001-b0b3-2290958ba0be service nova] Acquiring lock "refresh_cache-aa576459-65bf-4b16-ad1d-0930497522eb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.618012] env[62619]: DEBUG oslo_concurrency.lockutils [req-cba6a28c-2f7e-487d-8cf5-1a6d525b8fd5 req-93af0371-646b-4001-b0b3-2290958ba0be service nova] Acquired lock "refresh_cache-aa576459-65bf-4b16-ad1d-0930497522eb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.619586] env[62619]: DEBUG nova.network.neutron [req-cba6a28c-2f7e-487d-8cf5-1a6d525b8fd5 req-93af0371-646b-4001-b0b3-2290958ba0be service nova] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Refreshing network info cache for port 50d706b8-8aee-4647-b813-73db43cd22df {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1343.627801] env[62619]: DEBUG nova.scheduler.client.report [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1343.713776] env[62619]: DEBUG nova.network.neutron [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Successfully updated port: 1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1343.860572] env[62619]: DEBUG oslo_concurrency.lockutils [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] Releasing lock 
"refresh_cache-08c1fef9-40fc-4420-91de-fe911dea70f7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.860633] env[62619]: DEBUG nova.compute.manager [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Received event network-vif-plugged-e9bd11b1-25ab-47d5-a138-85ec70cd7a3b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1343.863890] env[62619]: DEBUG oslo_concurrency.lockutils [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] Acquiring lock "b334cb41-5ddf-4545-8e2a-97c4d1de7cbf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.863890] env[62619]: DEBUG oslo_concurrency.lockutils [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] Lock "b334cb41-5ddf-4545-8e2a-97c4d1de7cbf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.863890] env[62619]: DEBUG oslo_concurrency.lockutils [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] Lock "b334cb41-5ddf-4545-8e2a-97c4d1de7cbf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.863890] env[62619]: DEBUG nova.compute.manager [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] No waiting events found dispatching network-vif-plugged-e9bd11b1-25ab-47d5-a138-85ec70cd7a3b {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1343.863890] env[62619]: WARNING nova.compute.manager [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Received unexpected event network-vif-plugged-e9bd11b1-25ab-47d5-a138-85ec70cd7a3b for instance with vm_state building and task_state spawning. [ 1343.864240] env[62619]: DEBUG nova.compute.manager [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Received event network-changed-e9bd11b1-25ab-47d5-a138-85ec70cd7a3b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1343.864240] env[62619]: DEBUG nova.compute.manager [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Refreshing instance network info cache due to event network-changed-e9bd11b1-25ab-47d5-a138-85ec70cd7a3b. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1343.864240] env[62619]: DEBUG oslo_concurrency.lockutils [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] Acquiring lock "refresh_cache-b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.864240] env[62619]: DEBUG oslo_concurrency.lockutils [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] Acquired lock "refresh_cache-b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.864240] env[62619]: DEBUG nova.network.neutron [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Refreshing network info cache for port e9bd11b1-25ab-47d5-a138-85ec70cd7a3b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1343.885728] env[62619]: DEBUG nova.compute.manager [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1343.922553] env[62619]: DEBUG nova.virt.hardware [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1343.926017] env[62619]: DEBUG nova.virt.hardware [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1343.926017] env[62619]: DEBUG nova.virt.hardware [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1343.926017] env[62619]: DEBUG nova.virt.hardware [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1343.926017] env[62619]: DEBUG nova.virt.hardware [None 
req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1343.926017] env[62619]: DEBUG nova.virt.hardware [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1343.926370] env[62619]: DEBUG nova.virt.hardware [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1343.926370] env[62619]: DEBUG nova.virt.hardware [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1343.926370] env[62619]: DEBUG nova.virt.hardware [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1343.926370] env[62619]: DEBUG nova.virt.hardware [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1343.926370] env[62619]: DEBUG nova.virt.hardware [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1343.926985] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bacb828-a4f0-478c-bdae-cacf8fa26874 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.935469] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-296afe60-3266-4db2-9370-1fb816323c2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.032484] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777073, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.042358} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.032850] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] File moved {{(pid=62619) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1344.033137] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Cleaning up location [datastore1] vmware_temp/8698782c-6c54-4209-8869-4e6042bea038 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1344.033360] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleting the datastore file [datastore1] vmware_temp/8698782c-6c54-4209-8869-4e6042bea038 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1344.033559] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8741a85e-1aaa-47d2-89fd-014186dc5fb6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.040615] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1344.040615] env[62619]: value = "task-1777074" [ 1344.040615] env[62619]: _type = "Task" [ 1344.040615] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.051376] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777074, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.103389] env[62619]: DEBUG nova.network.neutron [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Successfully created port: 0c0de74a-465b-4ba6-bc5c-860a32c90ff1 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1344.136162] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.283s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.136162] env[62619]: DEBUG nova.compute.manager [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1344.139617] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 6.421s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.139918] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.139998] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1344.141221] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.796s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.148019] env[62619]: INFO nova.compute.claims [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1344.149536] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce0d508-d68c-48a8-b4b2-a166712d81e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.160789] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c51d8e6-c012-48b5-8b53-719027ead6c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.174235] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0caa9dd-7532-4eb8-a549-37deab6e1eb6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.181716] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a00d4cb-2fbe-4596-b539-0dd0fb7e1d28 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.218978] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181367MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1344.219172] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.220806] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Acquiring lock "refresh_cache-4f08d36b-f26e-499e-a4be-d8cbb481a44d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.221705] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Acquired lock "refresh_cache-4f08d36b-f26e-499e-a4be-d8cbb481a44d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.221705] env[62619]: DEBUG nova.network.neutron [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1344.558896] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777074, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026271} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.559220] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1344.560509] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f43cdb2f-e1de-4a1a-a9d6-497dfadc1f26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.569037] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1344.569037] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a08174-83a9-604c-2ee4-de3044698a46" [ 1344.569037] env[62619]: _type = "Task" [ 1344.569037] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.578082] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a08174-83a9-604c-2ee4-de3044698a46, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.652657] env[62619]: DEBUG nova.compute.utils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1344.656991] env[62619]: DEBUG nova.compute.manager [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1344.658264] env[62619]: DEBUG nova.network.neutron [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1344.781689] env[62619]: DEBUG nova.policy [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b0ec7a74ec944dd8b0417df0178ab15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e471e7b32b154c1db2eac990fd11e539', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1344.807019] env[62619]: DEBUG nova.network.neutron [req-cba6a28c-2f7e-487d-8cf5-1a6d525b8fd5 req-93af0371-646b-4001-b0b3-2290958ba0be service nova] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Updated VIF entry in instance network info cache for port 50d706b8-8aee-4647-b813-73db43cd22df. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1344.807019] env[62619]: DEBUG nova.network.neutron [req-cba6a28c-2f7e-487d-8cf5-1a6d525b8fd5 req-93af0371-646b-4001-b0b3-2290958ba0be service nova] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Updating instance_info_cache with network_info: [{"id": "50d706b8-8aee-4647-b813-73db43cd22df", "address": "fa:16:3e:32:35:47", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.62", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50d706b8-8a", "ovs_interfaceid": "50d706b8-8aee-4647-b813-73db43cd22df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.816792] env[62619]: DEBUG nova.network.neutron [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1344.996158] env[62619]: DEBUG nova.network.neutron [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Updated VIF entry in instance network info cache for port e9bd11b1-25ab-47d5-a138-85ec70cd7a3b. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1344.996158] env[62619]: DEBUG nova.network.neutron [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c service nova] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Updating instance_info_cache with network_info: [{"id": "e9bd11b1-25ab-47d5-a138-85ec70cd7a3b", "address": "fa:16:3e:30:22:6a", "network": {"id": "b07fc3fc-86b1-4689-bc3e-430ec15c2d40", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1955054791-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e606dfc7f0742f48e37edbf8b5ca3bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9bd11b1-25", "ovs_interfaceid": "e9bd11b1-25ab-47d5-a138-85ec70cd7a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.081846] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a08174-83a9-604c-2ee4-de3044698a46, 'name': SearchDatastore_Task, 'duration_secs': 0.010997} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.082397] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.082516] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 08c1fef9-40fc-4420-91de-fe911dea70f7/08c1fef9-40fc-4420-91de-fe911dea70f7.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1345.082667] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.082858] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1345.083064] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03cd687d-e4c0-470e-886b-a3ebbb2aa097 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.085767] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e57d347-c60e-449b-a524-57550f4dde76 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.093626] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1345.093626] env[62619]: value = "task-1777075" [ 1345.093626] env[62619]: _type = "Task" [ 1345.093626] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.095838] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1345.095838] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1345.100206] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3bc6a51-797d-4fc3-ba46-d01dc7d0581f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.108413] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777075, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.110131] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1345.110131] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524c56d8-2d37-013a-7264-da28871c9774" [ 1345.110131] env[62619]: _type = "Task" [ 1345.110131] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.119666] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524c56d8-2d37-013a-7264-da28871c9774, 'name': SearchDatastore_Task, 'duration_secs': 0.009309} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.120761] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6abf649-40d6-4444-921a-08797f2a0e3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.126726] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1345.126726] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528c48ba-5bbb-105c-ed3b-de83b371f6a8" [ 1345.126726] env[62619]: _type = "Task" [ 1345.126726] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.138987] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528c48ba-5bbb-105c-ed3b-de83b371f6a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.167914] env[62619]: DEBUG nova.compute.manager [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1345.308371] env[62619]: DEBUG oslo_concurrency.lockutils [req-cba6a28c-2f7e-487d-8cf5-1a6d525b8fd5 req-93af0371-646b-4001-b0b3-2290958ba0be service nova] Releasing lock "refresh_cache-aa576459-65bf-4b16-ad1d-0930497522eb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.332568] env[62619]: DEBUG nova.network.neutron [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Updating instance_info_cache with network_info: [{"id": "1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a", "address": "fa:16:3e:c0:f5:77", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.102", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1de1b35e-d2", "ovs_interfaceid": "1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.354368] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa9ec20-09fe-488c-9d92-03a12a3dfc03 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.363822] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dacd0e9-98c4-41b5-8519-5ebe4014ac7f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.406443] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5354849-d01d-4f87-82b4-c17ec33a551d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.415424] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c65828c-3f3c-45fa-8973-5563e7622272 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.432749] env[62619]: DEBUG nova.compute.provider_tree [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1345.507371] env[62619]: DEBUG oslo_concurrency.lockutils [req-c9494fee-24fe-4c41-be8e-e046950a3b45 req-a07aeece-9a56-421b-ac36-fcbe6d0a843c 
service nova] Releasing lock "refresh_cache-b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.609063] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777075, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.639348] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528c48ba-5bbb-105c-ed3b-de83b371f6a8, 'name': SearchDatastore_Task, 'duration_secs': 0.008432} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.639651] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.640170] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] aa576459-65bf-4b16-ad1d-0930497522eb/aa576459-65bf-4b16-ad1d-0930497522eb.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1345.640529] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.641015] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1345.641342] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76ae2fbe-7830-44f8-adb4-1af22fe2a1fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.644062] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a52fcee6-322a-488b-bf25-327fd2134d48 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.652706] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting 
for the task: (returnval){ [ 1345.652706] env[62619]: value = "task-1777076" [ 1345.652706] env[62619]: _type = "Task" [ 1345.652706] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.664737] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777076, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.666177] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1345.666568] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1345.667199] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b5c55d0-1da9-45ca-8474-e27228c1afb5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.679460] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Waiting for the task: (returnval){ [ 1345.679460] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5240a5da-3050-3b61-d2c0-e9f059b1fb8f" [ 1345.679460] env[62619]: _type = "Task" [ 1345.679460] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.687598] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5240a5da-3050-3b61-d2c0-e9f059b1fb8f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.839184] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Releasing lock "refresh_cache-4f08d36b-f26e-499e-a4be-d8cbb481a44d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.839885] env[62619]: DEBUG nova.compute.manager [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Instance network_info: |[{"id": "1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a", "address": "fa:16:3e:c0:f5:77", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.102", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1de1b35e-d2", "ovs_interfaceid": "1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1345.840038] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:f5:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1345.849022] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Creating folder: Project (4797964dae0547608a979967b802c24f). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1345.849337] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac06d54f-8c89-4b85-ac16-4568e80fb035 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.860687] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Created folder: Project (4797964dae0547608a979967b802c24f) in parent group-v368875. 
[ 1345.860792] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Creating folder: Instances. Parent ref: group-v368885. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1345.860983] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2cdb93d2-7b4f-4572-b6a3-437ca930c04d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.871077] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Created folder: Instances in parent group-v368885. [ 1345.871350] env[62619]: DEBUG oslo.service.loopingcall [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1345.872297] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1345.872297] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4107d682-a249-483e-b145-5115783f6e8c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.896710] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1345.896710] env[62619]: value = "task-1777079" [ 1345.896710] env[62619]: _type = "Task" [ 1345.896710] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.905342] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777079, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.936474] env[62619]: DEBUG nova.scheduler.client.report [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1345.954473] env[62619]: DEBUG nova.network.neutron [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Successfully created port: b6faf342-2332-4eee-bdde-dafce4f0a856 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1346.106492] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777075, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.166329] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777076, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.181267] env[62619]: DEBUG nova.compute.manager [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1346.196858] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5240a5da-3050-3b61-d2c0-e9f059b1fb8f, 'name': SearchDatastore_Task, 'duration_secs': 0.106054} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.197363] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-089d78a8-2d44-4eba-9247-cdb8221fc6c0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.207375] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Waiting for the task: (returnval){ [ 1346.207375] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5244886e-8554-5715-7b45-cb26b7d7b245" [ 1346.207375] env[62619]: _type = "Task" [ 1346.207375] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.214788] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5244886e-8554-5715-7b45-cb26b7d7b245, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.279938] env[62619]: DEBUG nova.virt.hardware [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1346.279938] env[62619]: DEBUG nova.virt.hardware [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1346.280240] env[62619]: DEBUG nova.virt.hardware [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1346.280240] env[62619]: DEBUG nova.virt.hardware [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1346.280395] env[62619]: DEBUG nova.virt.hardware [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1346.280492] env[62619]: DEBUG nova.virt.hardware [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1346.281044] env[62619]: DEBUG nova.virt.hardware [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1346.281044] env[62619]: DEBUG nova.virt.hardware [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1346.281161] env[62619]: DEBUG nova.virt.hardware [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1346.281509] env[62619]: DEBUG nova.virt.hardware [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1346.281509] env[62619]: DEBUG nova.virt.hardware [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1346.283815] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c64ce53-b585-4179-af03-88e63b9c1974 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.292229] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98ea74c-1489-4a85-9533-8afac833c8c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.408786] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777079, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.443242] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.443834] env[62619]: DEBUG nova.compute.manager [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1346.447450] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.846s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.448890] env[62619]: INFO nova.compute.claims [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1346.613252] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777075, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.664245] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777076, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.717472] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5244886e-8554-5715-7b45-cb26b7d7b245, 'name': SearchDatastore_Task, 'duration_secs': 0.01103} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.717735] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.718022] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] b334cb41-5ddf-4545-8e2a-97c4d1de7cbf/b334cb41-5ddf-4545-8e2a-97c4d1de7cbf.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1346.718295] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7be02c3e-ec76-484f-b2fb-0d3d9f86bda1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.725343] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Waiting for the task: (returnval){ [ 1346.725343] env[62619]: value = "task-1777080" [ 1346.725343] env[62619]: _type = "Task" [ 1346.725343] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.739229] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777080, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.909943] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777079, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.957129] env[62619]: DEBUG nova.compute.utils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1346.960571] env[62619]: DEBUG nova.compute.manager [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1346.960741] env[62619]: DEBUG nova.network.neutron [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1347.038820] env[62619]: DEBUG nova.policy [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b0ec7a74ec944dd8b0417df0178ab15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e471e7b32b154c1db2eac990fd11e539', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1347.113378] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777075, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.816601} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.113774] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 08c1fef9-40fc-4420-91de-fe911dea70f7/08c1fef9-40fc-4420-91de-fe911dea70f7.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1347.115748] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1347.116102] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fd658404-a190-47c4-9231-1508bfc7e751 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.122533] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1347.122533] env[62619]: value = "task-1777081" [ 1347.122533] env[62619]: _type = "Task" [ 1347.122533] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.132914] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777081, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.165680] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777076, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.236553] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777080, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.379295] env[62619]: DEBUG nova.network.neutron [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Successfully updated port: 0c0de74a-465b-4ba6-bc5c-860a32c90ff1 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1347.414518] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777079, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.466257] env[62619]: DEBUG nova.compute.manager [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1347.640326] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777081, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.656382] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8efabc-9f9d-4dac-b438-303613e9fcb7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.671543] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb536517-daa7-440a-95b2-9caf316d6f71 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.677082] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777076, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.707126] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635c4a07-16fe-49ae-b491-bee44ea3b80d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.713168] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.713464] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.720426] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f311047a-ea5d-4a14-bccf-09d6e9a67adf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.740858] env[62619]: DEBUG nova.compute.provider_tree [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1347.744641] env[62619]: DEBUG nova.network.neutron [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Successfully created port: b80ccdea-ed2b-4257-8c43-ae663d8b8bbc {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1347.750557] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777080, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.882310] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Acquiring lock "refresh_cache-e98120b4-7916-4ce4-88ef-0c904852bb1f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.882500] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Acquired lock "refresh_cache-e98120b4-7916-4ce4-88ef-0c904852bb1f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.882678] env[62619]: DEBUG nova.network.neutron [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1347.920178] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777079, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.937856] env[62619]: DEBUG nova.compute.manager [req-34b930fe-7a4b-4aa2-b3c7-84405e70115e req-c7eaabbc-4ef1-4bd8-8dbb-6e09a1eed518 service nova] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Received event network-vif-plugged-1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1347.938029] env[62619]: DEBUG oslo_concurrency.lockutils [req-34b930fe-7a4b-4aa2-b3c7-84405e70115e req-c7eaabbc-4ef1-4bd8-8dbb-6e09a1eed518 service nova] Acquiring lock "4f08d36b-f26e-499e-a4be-d8cbb481a44d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.938242] env[62619]: DEBUG oslo_concurrency.lockutils [req-34b930fe-7a4b-4aa2-b3c7-84405e70115e req-c7eaabbc-4ef1-4bd8-8dbb-6e09a1eed518 service nova] Lock "4f08d36b-f26e-499e-a4be-d8cbb481a44d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.938396] env[62619]: DEBUG oslo_concurrency.lockutils [req-34b930fe-7a4b-4aa2-b3c7-84405e70115e req-c7eaabbc-4ef1-4bd8-8dbb-6e09a1eed518 service nova] Lock "4f08d36b-f26e-499e-a4be-d8cbb481a44d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.938555] env[62619]: DEBUG nova.compute.manager [req-34b930fe-7a4b-4aa2-b3c7-84405e70115e req-c7eaabbc-4ef1-4bd8-8dbb-6e09a1eed518 service nova] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] No waiting events found dispatching network-vif-plugged-1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1347.938795] env[62619]: WARNING nova.compute.manager [req-34b930fe-7a4b-4aa2-b3c7-84405e70115e req-c7eaabbc-4ef1-4bd8-8dbb-6e09a1eed518 service nova] [instance: 
4f08d36b-f26e-499e-a4be-d8cbb481a44d] Received unexpected event network-vif-plugged-1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a for instance with vm_state building and task_state spawning. [ 1347.939021] env[62619]: DEBUG nova.compute.manager [req-34b930fe-7a4b-4aa2-b3c7-84405e70115e req-c7eaabbc-4ef1-4bd8-8dbb-6e09a1eed518 service nova] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Received event network-changed-1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1347.939021] env[62619]: DEBUG nova.compute.manager [req-34b930fe-7a4b-4aa2-b3c7-84405e70115e req-c7eaabbc-4ef1-4bd8-8dbb-6e09a1eed518 service nova] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Refreshing instance network info cache due to event network-changed-1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1347.941715] env[62619]: DEBUG oslo_concurrency.lockutils [req-34b930fe-7a4b-4aa2-b3c7-84405e70115e req-c7eaabbc-4ef1-4bd8-8dbb-6e09a1eed518 service nova] Acquiring lock "refresh_cache-4f08d36b-f26e-499e-a4be-d8cbb481a44d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.945259] env[62619]: DEBUG oslo_concurrency.lockutils [req-34b930fe-7a4b-4aa2-b3c7-84405e70115e req-c7eaabbc-4ef1-4bd8-8dbb-6e09a1eed518 service nova] Acquired lock "refresh_cache-4f08d36b-f26e-499e-a4be-d8cbb481a44d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.946125] env[62619]: DEBUG nova.network.neutron [req-34b930fe-7a4b-4aa2-b3c7-84405e70115e req-c7eaabbc-4ef1-4bd8-8dbb-6e09a1eed518 service nova] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Refreshing network info cache for port 1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1348.132789] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777081, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.170790] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777076, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.515391} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.170790] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] aa576459-65bf-4b16-ad1d-0930497522eb/aa576459-65bf-4b16-ad1d-0930497522eb.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1348.171508] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1348.171508] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8600a9e-d408-41e5-a780-c8c09f0e5adb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.178210] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1348.178210] env[62619]: value = "task-1777082" [ 1348.178210] env[62619]: _type = "Task" [ 1348.178210] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.187086] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777082, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.216237] env[62619]: DEBUG nova.compute.manager [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1348.240447] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777080, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.434633} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.240447] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] b334cb41-5ddf-4545-8e2a-97c4d1de7cbf/b334cb41-5ddf-4545-8e2a-97c4d1de7cbf.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1348.240447] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1348.240447] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06ef3d40-bb6b-4376-b421-48290e4bc832 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.251799] env[62619]: DEBUG nova.scheduler.client.report [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1348.255323] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Waiting for the task: (returnval){ [ 1348.255323] env[62619]: value = "task-1777083" [ 1348.255323] env[62619]: _type = "Task" [ 1348.255323] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.264275] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777083, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.414510] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777079, 'name': CreateVM_Task, 'duration_secs': 2.399715} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.414830] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1348.417963] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1348.417963] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.417963] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1348.417963] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4af718c8-8353-44cc-a0ae-98d2d3ffcea0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.422717] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Waiting for the task: (returnval){ [ 1348.422717] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52405a39-9b71-6721-cc58-8ff911613412" [ 1348.422717] env[62619]: _type = "Task" [ 1348.422717] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.438703] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52405a39-9b71-6721-cc58-8ff911613412, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.480792] env[62619]: DEBUG nova.compute.manager [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1348.488925] env[62619]: DEBUG nova.network.neutron [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1348.515000] env[62619]: DEBUG nova.virt.hardware [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1348.515313] env[62619]: DEBUG nova.virt.hardware [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1348.515468] env[62619]: DEBUG nova.virt.hardware [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1348.515643] env[62619]: DEBUG nova.virt.hardware [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1348.515782] env[62619]: DEBUG nova.virt.hardware [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1348.515918] env[62619]: DEBUG nova.virt.hardware [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1348.516745] env[62619]: DEBUG nova.virt.hardware [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1348.517178] env[62619]: DEBUG nova.virt.hardware [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1348.517251] env[62619]: DEBUG nova.virt.hardware [None 
req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1348.517417] env[62619]: DEBUG nova.virt.hardware [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1348.518122] env[62619]: DEBUG nova.virt.hardware [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1348.519179] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df7d247-3e22-4257-9275-1e4fab56bacf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.529554] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b750e3fe-0834-42c3-a739-e1a820d8b7ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.574178] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Acquiring lock "ed34ae20-a891-45aa-8124-f36f264937f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.574416] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Lock "ed34ae20-a891-45aa-8124-f36f264937f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.634825] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777081, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.052956} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.639142] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1348.640081] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc78250-2b64-41b6-853e-b3ca3fba3be8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.668211] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 08c1fef9-40fc-4420-91de-fe911dea70f7/08c1fef9-40fc-4420-91de-fe911dea70f7.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1348.670869] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1254c1b4-e499-4f61-99b6-8b33047561be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.696679] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777082, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.234934} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.698115] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1348.698605] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1348.698605] env[62619]: value = "task-1777084" [ 1348.698605] env[62619]: _type = "Task" [ 1348.698605] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.699149] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95212b16-6d73-4680-a430-c007a520a63e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.733253] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] aa576459-65bf-4b16-ad1d-0930497522eb/aa576459-65bf-4b16-ad1d-0930497522eb.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1348.737581] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777084, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.737581] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f10a5ea0-a7da-4075-90ce-ce8d711f0dbe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.755956] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.309s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.755956] env[62619]: DEBUG nova.compute.manager [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1348.763441] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.897s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.764949] env[62619]: INFO nova.compute.claims [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1348.771760] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1348.771760] env[62619]: value = "task-1777085" [ 1348.771760] env[62619]: _type = "Task" [ 1348.771760] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.784468] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.785305] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777083, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084869} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.787448] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1348.792946] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c21ab89-cb2a-48f9-9e6f-afde724bdef7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.797423] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777085, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.821770] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] b334cb41-5ddf-4545-8e2a-97c4d1de7cbf/b334cb41-5ddf-4545-8e2a-97c4d1de7cbf.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1348.821911] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-144d5951-e214-4e22-abb1-6737bfb016c4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.847130] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Waiting for the task: (returnval){ [ 1348.847130] env[62619]: value = "task-1777086" [ 1348.847130] env[62619]: _type = "Task" [ 1348.847130] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.855913] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777086, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.940406] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52405a39-9b71-6721-cc58-8ff911613412, 'name': SearchDatastore_Task, 'duration_secs': 0.046679} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.940753] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1348.941020] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1348.945426] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1348.946352] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.946352] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1348.946352] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45bdf356-90a4-48d3-997b-efb377f94d32 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.959652] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1348.960416] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Folder [datastore1] devstack-image-cache_base created. 
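The Acquiring/Acquired/Releasing lock messages around the image-cache path come from oslo.concurrency's lockutils, which Nova uses to serialize work on shared resources such as the cached image vmdk and the resource tracker's "compute_resources". A short sketch of both forms, with placeholder lock bodies and an illustrative image id:

```python
# Sketch of the oslo.concurrency locking behind the Acquiring/Acquired/Releasing
# DEBUG lines above. Lock names mirror the log; the guarded bodies are placeholders.
from oslo_concurrency import lockutils

# Context-manager form, e.g. around a cached image path:
with lockutils.lock('[datastore1] devstack-image-cache_base/<image-id>.vmdk'):
    pass  # fetch the image into the cache, or reuse the existing copy

# Decorator form, the style behind the "compute_resources" claim messages:
@lockutils.synchronized('compute_resources')
def instance_claim():
    pass  # resource-tracker bookkeeping guarded by the lock
```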
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1348.963567] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-827bcbb1-ce9f-4189-8651-3abbf6fc9036 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.967087] env[62619]: DEBUG nova.network.neutron [req-34b930fe-7a4b-4aa2-b3c7-84405e70115e req-c7eaabbc-4ef1-4bd8-8dbb-6e09a1eed518 service nova] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Updated VIF entry in instance network info cache for port 1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1348.967484] env[62619]: DEBUG nova.network.neutron [req-34b930fe-7a4b-4aa2-b3c7-84405e70115e req-c7eaabbc-4ef1-4bd8-8dbb-6e09a1eed518 service nova] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Updating instance_info_cache with network_info: [{"id": "1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a", "address": "fa:16:3e:c0:f5:77", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.102", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1de1b35e-d2", "ovs_interfaceid": "1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.973667] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Waiting for the task: (returnval){ [ 1348.973667] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529f4122-3002-feea-b2e9-b6bfaca6a49b" [ 1348.973667] env[62619]: _type = "Task" [ 1348.973667] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.983724] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529f4122-3002-feea-b2e9-b6bfaca6a49b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.174546] env[62619]: DEBUG nova.network.neutron [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Updating instance_info_cache with network_info: [{"id": "0c0de74a-465b-4ba6-bc5c-860a32c90ff1", "address": "fa:16:3e:62:5d:92", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.83", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c0de74a-46", "ovs_interfaceid": "0c0de74a-465b-4ba6-bc5c-860a32c90ff1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.201747] env[62619]: DEBUG nova.network.neutron [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Successfully updated port: b6faf342-2332-4eee-bdde-dafce4f0a856 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1349.213820] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777084, 'name': ReconfigVM_Task, 'duration_secs': 0.409787} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.214139] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 08c1fef9-40fc-4420-91de-fe911dea70f7/08c1fef9-40fc-4420-91de-fe911dea70f7.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1349.215751] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f3ee267-dbff-4b3d-963f-266da602605b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.224371] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1349.224371] env[62619]: value = "task-1777087" [ 1349.224371] env[62619]: _type = "Task" [ 1349.224371] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.234142] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777087, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.277486] env[62619]: DEBUG nova.compute.utils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1349.279247] env[62619]: DEBUG nova.compute.manager [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1349.282789] env[62619]: DEBUG nova.network.neutron [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1349.304019] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777085, 'name': ReconfigVM_Task, 'duration_secs': 0.41227} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.304019] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Reconfigured VM instance instance-00000002 to attach disk [datastore1] aa576459-65bf-4b16-ad1d-0930497522eb/aa576459-65bf-4b16-ad1d-0930497522eb.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1349.306178] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-096c0fad-fdaf-4a72-abfe-51371bca0158 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.314312] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1349.314312] env[62619]: value = "task-1777088" [ 1349.314312] env[62619]: _type = "Task" [ 1349.314312] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.331753] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777088, 'name': Rename_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.358017] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777086, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.473304] env[62619]: DEBUG oslo_concurrency.lockutils [req-34b930fe-7a4b-4aa2-b3c7-84405e70115e req-c7eaabbc-4ef1-4bd8-8dbb-6e09a1eed518 service nova] Releasing lock "refresh_cache-4f08d36b-f26e-499e-a4be-d8cbb481a44d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1349.493443] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529f4122-3002-feea-b2e9-b6bfaca6a49b, 'name': SearchDatastore_Task, 'duration_secs': 0.021146} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.496103] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74cf85d1-d9c2-48a7-830c-ec0a05b475a1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.500062] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Waiting for the task: (returnval){ [ 1349.500062] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5251bc2d-8d23-2351-7282-e8b448391e4e" [ 1349.500062] env[62619]: _type = "Task" [ 1349.500062] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.515751] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5251bc2d-8d23-2351-7282-e8b448391e4e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.559294] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb1b81e-607c-4c80-b971-db149a886696 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.571285] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef06f07-fd0e-42ea-9f80-53d1c7003faf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.620500] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d354976-35ce-496c-8af8-b507954948d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.638776] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252bee60-96bf-45e5-ba36-0ab9c6995380 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.647702] env[62619]: DEBUG nova.policy [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e223a49dd69e4c07b131b6474ebfece8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ef3f0ff9d6b41b2b73a23548c9bdf5b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1349.664174] env[62619]: DEBUG nova.compute.provider_tree [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1349.679426] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Releasing lock "refresh_cache-e98120b4-7916-4ce4-88ef-0c904852bb1f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1349.679755] env[62619]: DEBUG nova.compute.manager [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Instance network_info: |[{"id": "0c0de74a-465b-4ba6-bc5c-860a32c90ff1", "address": "fa:16:3e:62:5d:92", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], 
"gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.83", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c0de74a-46", "ovs_interfaceid": "0c0de74a-465b-4ba6-bc5c-860a32c90ff1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1349.680517] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:5d:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0c0de74a-465b-4ba6-bc5c-860a32c90ff1', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1349.692770] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Creating folder: Project (2066ce6a2aeb4a0eb4bee782cfcdfa42). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1349.692770] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9cfdcf3c-b90b-470a-9c8c-174dd2565530 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.701649] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Created folder: Project (2066ce6a2aeb4a0eb4bee782cfcdfa42) in parent group-v368875. [ 1349.701724] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Creating folder: Instances. Parent ref: group-v368888. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1349.704032] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87c46dfd-f6db-4644-af8e-e44dc3c40cde {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.710261] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "refresh_cache-ac03bcf3-61df-4557-8018-0ad54ef30f17" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1349.710307] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "refresh_cache-ac03bcf3-61df-4557-8018-0ad54ef30f17" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.710454] env[62619]: DEBUG nova.network.neutron [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1349.722110] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Created folder: Instances in parent group-v368888. [ 1349.722110] env[62619]: DEBUG oslo.service.loopingcall [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1349.722110] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1349.722110] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9aeecc4b-0c0a-40ae-802d-1728f8bce6c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.753645] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1349.753645] env[62619]: value = "task-1777091" [ 1349.753645] env[62619]: _type = "Task" [ 1349.753645] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.753959] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777087, 'name': Rename_Task, 'duration_secs': 0.214473} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.754306] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1349.761764] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48fb384f-0517-4845-ac5e-9f00e05821e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.774331] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1349.774331] env[62619]: value = "task-1777092" [ 1349.774331] env[62619]: _type = "Task" [ 1349.774331] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.784085] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777092, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.792241] env[62619]: DEBUG nova.compute.manager [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1349.825384] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777088, 'name': Rename_Task, 'duration_secs': 0.428033} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.825542] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1349.825787] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-547b2bf0-49db-4e35-9f7c-ba14c2f4129c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.833598] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1349.833598] env[62619]: value = "task-1777093" [ 1349.833598] env[62619]: _type = "Task" [ 1349.833598] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.845463] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777093, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.858161] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777086, 'name': ReconfigVM_Task, 'duration_secs': 0.562757} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.859825] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Reconfigured VM instance instance-00000003 to attach disk [datastore1] b334cb41-5ddf-4545-8e2a-97c4d1de7cbf/b334cb41-5ddf-4545-8e2a-97c4d1de7cbf.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1349.859825] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f3b36f4-18aa-4508-a9a1-6f2d9126719d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.864921] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Waiting for the task: (returnval){ [ 1349.864921] env[62619]: value = "task-1777094" [ 1349.864921] env[62619]: _type = "Task" [ 1349.864921] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.875232] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777094, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.014804] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5251bc2d-8d23-2351-7282-e8b448391e4e, 'name': SearchDatastore_Task, 'duration_secs': 0.065197} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.014804] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.014804] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4f08d36b-f26e-499e-a4be-d8cbb481a44d/4f08d36b-f26e-499e-a4be-d8cbb481a44d.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1350.015314] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-85453f6d-e5b3-45c4-8ed5-f02ee4b1c972 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.022748] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Waiting for the task: (returnval){ [ 1350.022748] env[62619]: value = "task-1777095" [ 1350.022748] env[62619]: _type = "Task" [ 1350.022748] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.037714] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777095, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.209825] env[62619]: ERROR nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [req-7352b82d-ca24-4470-88da-e1eaad03582e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7352b82d-ca24-4470-88da-e1eaad03582e"}]} [ 1350.234340] env[62619]: DEBUG nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1350.263931] env[62619]: DEBUG nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1350.264224] env[62619]: DEBUG nova.compute.provider_tree [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 16 to 17 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1350.264377] env[62619]: DEBUG nova.compute.provider_tree [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1350.272850] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777091, 'name': CreateVM_Task, 'duration_secs': 0.404998} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.272937] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1350.273663] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.273887] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.274144] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1350.274417] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8eafab54-8247-4d86-bdab-9cb6ef2b0164 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.285474] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Acquiring lock "1847c5d8-16eb-4feb-8a09-24ad6728e59c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.285819] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Lock "1847c5d8-16eb-4feb-8a09-24ad6728e59c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.288768] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Waiting for the task: (returnval){ [ 1350.288768] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52df8a7a-cb72-4b15-c470-6ed7626476d7" [ 1350.288768] env[62619]: _type = "Task" [ 1350.288768] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.292156] env[62619]: DEBUG oslo_vmware.api [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777092, 'name': PowerOnVM_Task, 'duration_secs': 0.513848} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.293385] env[62619]: DEBUG nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1350.299626] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1350.301784] env[62619]: INFO nova.compute.manager [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Took 16.06 seconds to spawn the instance on the hypervisor. [ 1350.303171] env[62619]: DEBUG nova.compute.manager [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1350.308544] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2264d26-576b-41f9-8df5-0193092721b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.319457] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52df8a7a-cb72-4b15-c470-6ed7626476d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.337714] env[62619]: DEBUG nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1350.350389] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777093, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.370567] env[62619]: DEBUG nova.network.neutron [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1350.383432] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777094, 'name': Rename_Task, 'duration_secs': 0.15726} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.383806] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1350.384130] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ccb1c152-d2a8-49d6-909c-185297627c7b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.391816] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Waiting for the task: (returnval){ [ 1350.391816] env[62619]: value = "task-1777096" [ 1350.391816] env[62619]: _type = "Task" [ 1350.391816] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.409108] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777096, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.507907] env[62619]: DEBUG nova.network.neutron [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Successfully created port: 9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1350.541783] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777095, 'name': CopyVirtualDisk_Task} progress is 51%. 
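Taken together, the task names threaded through these lines trace one instance's root-disk preparation and boot: SearchDatastore to find the cached image, CopyVirtualDisk, ExtendVirtualDisk, ReconfigVM to attach the disk, Rename, then PowerOnVM. A condensed outline of that sequence under the oslo.vmware session API; the keyword arguments follow the vSphere SDK names but are simplified, and all morefs, paths and specs are placeholders:

```python
# Assumption-level outline of the per-instance task sequence traced above.
# `session` is an oslo.vmware VMwareAPISession; every *_Task call is polled by
# wait_for_task(), which produces the "progress is N%" lines.
def build_root_disk_and_boot(session, vm_ref, dc_ref, cached_vmdk, dest_vmdk,
                             new_capacity_kb, attach_spec, vm_name):
    disk_mgr = session.vim.service_content.virtualDiskManager

    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=cached_vmdk, sourceDatacenter=dc_ref,
                              destName=dest_vmdk)
    session.wait_for_task(task)            # copy cached image -> instance vmdk

    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                              name=dest_vmdk, datacenter=dc_ref,
                              newCapacityKb=new_capacity_kb, eagerZero=False)
    session.wait_for_task(task)            # grow root disk to the flavor size

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                              spec=attach_spec)
    session.wait_for_task(task)            # attach the disk to the VM

    task = session.invoke_api(session.vim, 'Rename_Task', vm_ref, newName=vm_name)
    session.wait_for_task(task)

    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)            # boot the instance
```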
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.620293] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb312d0-ffde-47fc-835d-b1ae36cf9f99 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.628676] env[62619]: DEBUG nova.network.neutron [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Successfully updated port: b80ccdea-ed2b-4257-8c43-ae663d8b8bbc {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1350.632146] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09dd0aa-6a94-415f-8b5e-0ddd7eb72615 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.678583] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e261990-7bca-49e3-ad43-7468691a4d0f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.687398] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "fb231b38-950e-4c86-bfe5-4c10a304910f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.687721] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "fb231b38-950e-4c86-bfe5-4c10a304910f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.694115] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b849d2-f9ec-4e0c-856f-0dd0d674db9f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.715603] env[62619]: DEBUG nova.compute.provider_tree [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1350.807197] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': 
session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52df8a7a-cb72-4b15-c470-6ed7626476d7, 'name': SearchDatastore_Task, 'duration_secs': 0.093979} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.807524] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.807747] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1350.809227] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.809227] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.809227] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1350.809227] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ae16be6-dd5b-4295-87eb-5803634d1029 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.813180] env[62619]: DEBUG nova.compute.manager [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1350.816562] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1350.816944] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1350.817466] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d55a0701-e34b-45b6-bd54-e7fef87c51fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.823421] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Waiting for the task: (returnval){ [ 1350.823421] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526edb25-8b8f-df42-7e03-3823959d16df" [ 1350.823421] env[62619]: _type = "Task" [ 1350.823421] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.843327] env[62619]: INFO nova.compute.manager [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Took 20.83 seconds to build instance. [ 1350.847832] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526edb25-8b8f-df42-7e03-3823959d16df, 'name': SearchDatastore_Task, 'duration_secs': 0.010639} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.849599] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a20741e8-458e-4647-876c-3bbe2115e116 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.855514] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777093, 'name': PowerOnVM_Task} progress is 37%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.858325] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Waiting for the task: (returnval){ [ 1350.858325] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52471eae-4e15-b780-1b7e-91c70a23044d" [ 1350.858325] env[62619]: _type = "Task" [ 1350.858325] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.866213] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52471eae-4e15-b780-1b7e-91c70a23044d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.908423] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777096, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.968154] env[62619]: DEBUG nova.virt.hardware [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1350.969179] env[62619]: DEBUG nova.virt.hardware [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1350.969430] env[62619]: DEBUG nova.virt.hardware [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1350.969549] env[62619]: DEBUG nova.virt.hardware [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1350.969685] env[62619]: DEBUG nova.virt.hardware [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1350.969900] env[62619]: DEBUG nova.virt.hardware [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1350.970162] env[62619]: DEBUG nova.virt.hardware [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1350.971168] env[62619]: DEBUG nova.virt.hardware [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1350.971461] env[62619]: DEBUG nova.virt.hardware [None 
req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1350.971657] env[62619]: DEBUG nova.virt.hardware [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1350.971824] env[62619]: DEBUG nova.virt.hardware [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1350.973085] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7441430b-1d8f-401e-be52-bbacc73203b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.981022] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d621cb6e-2878-4b6c-a1b0-de3e6649fa12 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.034806] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777095, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.762294} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.035242] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4f08d36b-f26e-499e-a4be-d8cbb481a44d/4f08d36b-f26e-499e-a4be-d8cbb481a44d.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1351.035399] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1351.035615] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-657940fc-c5b7-4d28-a220-58e106ff167e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.042515] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Waiting for the task: (returnval){ [ 1351.042515] env[62619]: value = "task-1777097" [ 1351.042515] env[62619]: _type = "Task" [ 1351.042515] env[62619]: } to complete. 
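The nova.virt.hardware lines above walk through CPU topology selection for m1.nano: with 1 vCPU, no flavor or image limits (0:0:0) and the 65536-per-dimension defaults, the only factorization is sockets=1, cores=1, threads=1. The underlying idea is to enumerate (sockets, cores, threads) triples whose product equals the vCPU count and that respect the per-dimension maxima, then sort them by preference. A small illustrative sketch of that enumeration (not the actual Nova function):

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for sockets, cores, threads in product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                yield sockets, cores, threads

    # For the 1-vCPU flavor in the log there is exactly one candidate:
    print(list(possible_topologies(1)))   # [(1, 1, 1)]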
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.051928] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777097, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.057948] env[62619]: DEBUG nova.network.neutron [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Updating instance_info_cache with network_info: [{"id": "b6faf342-2332-4eee-bdde-dafce4f0a856", "address": "fa:16:3e:b8:19:39", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6faf342-23", "ovs_interfaceid": "b6faf342-2332-4eee-bdde-dafce4f0a856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.139042] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "refresh_cache-4763e489-5aeb-4dc0-b327-b79a55afdfe3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1351.139204] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "refresh_cache-4763e489-5aeb-4dc0-b327-b79a55afdfe3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.139377] env[62619]: DEBUG nova.network.neutron [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1351.244408] env[62619]: ERROR nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [req-39b0757c-8081-486d-8380-3f430124f51a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-39b0757c-8081-486d-8380-3f430124f51a"}]} [ 1351.270454] env[62619]: DEBUG nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1351.301021] env[62619]: DEBUG nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1351.301021] env[62619]: DEBUG nova.compute.provider_tree [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1351.325629] env[62619]: DEBUG nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1351.352392] env[62619]: DEBUG oslo_concurrency.lockutils [None req-169ce928-92ce-4f2e-bcaf-2795eeb1d149 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "08c1fef9-40fc-4420-91de-fe911dea70f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.348s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.352986] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 
tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777093, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.353833] env[62619]: DEBUG nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1351.377946] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52471eae-4e15-b780-1b7e-91c70a23044d, 'name': SearchDatastore_Task, 'duration_secs': 0.039284} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.377946] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.377946] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e98120b4-7916-4ce4-88ef-0c904852bb1f/e98120b4-7916-4ce4-88ef-0c904852bb1f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1351.377946] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-338444ea-6f74-4bc1-a533-35088b038982 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.392030] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Waiting for the task: (returnval){ [ 1351.392030] env[62619]: value = "task-1777098" [ 1351.392030] env[62619]: _type = "Task" [ 1351.392030] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.405463] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777098, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.415516] env[62619]: DEBUG oslo_vmware.api [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777096, 'name': PowerOnVM_Task, 'duration_secs': 0.742882} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.419792] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1351.419792] env[62619]: INFO nova.compute.manager [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Took 12.27 seconds to spawn the instance on the hypervisor. [ 1351.419792] env[62619]: DEBUG nova.compute.manager [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1351.428418] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db590a6c-5adc-4e2d-a9b0-7caffe72035c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.433702] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "4ee81568-ad9a-4ded-b6fe-15503d85968e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.433937] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "4ee81568-ad9a-4ded-b6fe-15503d85968e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.554524] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777097, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095784} completed successfully. 
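Around these entries the root-disk preparation for 4f08d36b completes: CopyVirtualDisk_Task copies the cached VMDK into the instance directory, and because the cirros image is far smaller than the flavor's 1 GiB root disk, the driver then extends it ("Extending root virtual disk to 1048576", a figure consistent with 1 GiB expressed in KB for root_gb=1). A sketch of that copy-then-grow decision; copy_disk and extend_disk are hypothetical callables standing in for CopyVirtualDisk_Task and ExtendVirtualDisk_Task.

    def prepare_root_disk(copy_disk, extend_disk, cached_vmdk, instance_vmdk,
                          image_size_kb, root_gb):
        """Copy the cached image to the instance path, then grow it if needed."""
        copy_disk(cached_vmdk, instance_vmdk)          # CopyVirtualDisk_Task in the log
        requested_kb = root_gb * 1024 * 1024           # 1 GiB flavor -> 1048576 KB
        if requested_kb > image_size_kb:
            extend_disk(instance_vmdk, requested_kb)   # "Extending root virtual disk to 1048576"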
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.554830] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1351.555941] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e90bd24-a938-4744-b6f6-641e19d85cd6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.560562] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "refresh_cache-ac03bcf3-61df-4557-8018-0ad54ef30f17" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.560794] env[62619]: DEBUG nova.compute.manager [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Instance network_info: |[{"id": "b6faf342-2332-4eee-bdde-dafce4f0a856", "address": "fa:16:3e:b8:19:39", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6faf342-23", "ovs_interfaceid": "b6faf342-2332-4eee-bdde-dafce4f0a856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1351.561685] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:19:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6faf342-2332-4eee-bdde-dafce4f0a856', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1351.570250] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Creating folder: Project 
(e471e7b32b154c1db2eac990fd11e539). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1351.584967] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6720a9b-f0a1-48f8-8f12-437bab9d6b32 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.596725] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 4f08d36b-f26e-499e-a4be-d8cbb481a44d/4f08d36b-f26e-499e-a4be-d8cbb481a44d.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1351.597347] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d331470-922d-48cb-a540-932123a9faa8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.619475] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Waiting for the task: (returnval){ [ 1351.619475] env[62619]: value = "task-1777100" [ 1351.619475] env[62619]: _type = "Task" [ 1351.619475] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.620989] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Created folder: Project (e471e7b32b154c1db2eac990fd11e539) in parent group-v368875. [ 1351.620989] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Creating folder: Instances. Parent ref: group-v368891. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1351.624300] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10b75bd3-53c8-4936-be7f-00fe59e66b05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.631125] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777100, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.635199] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Created folder: Instances in parent group-v368891. [ 1351.635199] env[62619]: DEBUG oslo.service.loopingcall [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
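The Folder.CreateFolder calls above build the per-tenant inventory layout: a "Project (<tenant_id>)" folder under the parent group-v368875, then an "Instances" folder inside it, after which CreateVM_Task places the VM there. A small sketch of idempotent nested folder creation; find_child and create_child are hypothetical stand-ins for the vSphere inventory lookups, not real API names.

    def ensure_folder_path(parent, names, find_child, create_child):
        """Return the folder at parent/names[0]/.../names[-1], creating missing levels.

        find_child(parent, name) -> folder or None; create_child(parent, name)
        corresponds to the Folder.CreateFolder calls in the log.
        """
        node = parent
        for name in names:
            child = find_child(node, name)
            if child is None:
                child = create_child(node, name)
            node = child
        return node

    # e.g. ensure_folder_path(group_v368875,
    #                         ["Project (e471e7b32b154c1db2eac990fd11e539)", "Instances"],
    #                         find_child, create_child)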
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1351.635199] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1351.635199] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f5938d7-18c4-46ae-86b7-fbede09f8ae4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.657250] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1351.657250] env[62619]: value = "task-1777102" [ 1351.657250] env[62619]: _type = "Task" [ 1351.657250] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.665161] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777102, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.680083] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4481446-415f-4429-8a06-7784684b564c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.688407] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e2423a-d352-45d4-8057-7bdf5e947fc5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.721037] env[62619]: DEBUG nova.network.neutron [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1351.723500] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fd7e6c-47f0-462f-bf7f-3f51503d8d4c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.731176] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88546c21-d61b-498e-ae1f-3dd389ff5ad1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.745686] env[62619]: DEBUG nova.compute.provider_tree [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1351.850859] env[62619]: DEBUG oslo_vmware.api [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777093, 'name': PowerOnVM_Task, 'duration_secs': 1.608718} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.851468] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1351.851708] env[62619]: INFO nova.compute.manager [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Took 14.99 seconds to spawn the instance on the hypervisor. [ 1351.851868] env[62619]: DEBUG nova.compute.manager [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1351.852740] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf2d4aa-9046-4629-9542-145296b037a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.860994] env[62619]: DEBUG nova.compute.manager [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Starting instance... 
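Taken together, these entries trace the compute manager's build pipeline end to end: "Starting instance..." (_do_build_and_run_instance), a claim on "compute_resources" before anything is committed, network allocation feeding the instance_info_cache, "Start spawning the instance on the hypervisor", and finally "Powered on the VM" with the "Took N seconds to spawn/build" summaries. A compressed sketch of that ordering; every step here is a stand-in parameter rather than the real method signatures.

    def build_and_run_instance(claim_resources, allocate_network, spawn, power_on, instance):
        """Illustrative ordering of the build steps seen in the log."""
        with claim_resources(instance):                  # lock "compute_resources" / instance_claim
            network_info = allocate_network(instance)    # neutron ports -> instance_info_cache
            spawn(instance, network_info)                # folders, copy/extend disk, CreateVM_Task
            power_on(instance)                           # PowerOnVM_Task, then "Took N seconds to spawn"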
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1351.906469] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777098, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.956294] env[62619]: INFO nova.compute.manager [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Took 18.52 seconds to build instance. [ 1352.132499] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777100, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.167755] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777102, 'name': CreateVM_Task, 'duration_secs': 0.413635} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.167963] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1352.169071] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1352.169071] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.169336] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1352.169605] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd81bfe1-b95f-43d1-83b9-6602dca98bc4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.175851] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1352.175851] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528aa538-a0b3-5165-b2fb-ee9480475361" [ 1352.175851] env[62619]: _type = "Task" [ 1352.175851] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.185769] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528aa538-a0b3-5165-b2fb-ee9480475361, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.270548] env[62619]: ERROR nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [req-bd9caa43-9959-4406-8731-0ea873c1924f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bd9caa43-9959-4406-8731-0ea873c1924f"}]} [ 1352.288214] env[62619]: DEBUG nova.network.neutron [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Updating instance_info_cache with network_info: [{"id": "b80ccdea-ed2b-4257-8c43-ae663d8b8bbc", "address": "fa:16:3e:2d:ca:43", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb80ccdea-ed", "ovs_interfaceid": "b80ccdea-ed2b-4257-8c43-ae663d8b8bbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.302257] env[62619]: DEBUG nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1352.311146] env[62619]: DEBUG nova.compute.manager 
[req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Received event network-vif-plugged-0c0de74a-465b-4ba6-bc5c-860a32c90ff1 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1352.312304] env[62619]: DEBUG oslo_concurrency.lockutils [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] Acquiring lock "e98120b4-7916-4ce4-88ef-0c904852bb1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.313064] env[62619]: DEBUG oslo_concurrency.lockutils [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] Lock "e98120b4-7916-4ce4-88ef-0c904852bb1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.315027] env[62619]: DEBUG oslo_concurrency.lockutils [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] Lock "e98120b4-7916-4ce4-88ef-0c904852bb1f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.315027] env[62619]: DEBUG nova.compute.manager [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] No waiting events found dispatching network-vif-plugged-0c0de74a-465b-4ba6-bc5c-860a32c90ff1 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1352.316253] env[62619]: WARNING nova.compute.manager [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Received unexpected event network-vif-plugged-0c0de74a-465b-4ba6-bc5c-860a32c90ff1 for instance with vm_state building and task_state spawning. [ 1352.316253] env[62619]: DEBUG nova.compute.manager [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Received event network-changed-0c0de74a-465b-4ba6-bc5c-860a32c90ff1 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1352.316253] env[62619]: DEBUG nova.compute.manager [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Refreshing instance network info cache due to event network-changed-0c0de74a-465b-4ba6-bc5c-860a32c90ff1. 
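The req-30e09578 entries show how Neutron port events reach the compute manager: a network-vif-plugged event arrives for e98120b4, the per-instance "-events" lock is taken, and because the spawn is not currently waiting on that event it is logged as unexpected; the network-changed event that follows instead triggers a refresh of the cached network info for the port. A minimal sketch of that pop-or-warn dispatch, assuming a simple dict of waiting futures keyed by (instance, event name); this is a conceptual model, not the Nova code.

    import threading
    from concurrent.futures import Future

    _events_lock = threading.Lock()          # stands in for the per-instance "-events" lock
    _waiting = {}                            # (instance_uuid, event_name) -> Future

    def prepare_for_event(instance_uuid, event_name):
        """Called by the code path that expects an event (e.g. while plugging VIFs)."""
        fut = Future()
        with _events_lock:
            _waiting[(instance_uuid, event_name)] = fut
        return fut

    def dispatch_external_event(instance_uuid, event_name):
        """Called when Neutron reports e.g. network-vif-plugged-<port_id>."""
        with _events_lock:
            fut = _waiting.pop((instance_uuid, event_name), None)
        if fut is None:
            print("WARNING: unexpected event %s for %s" % (event_name, instance_uuid))
            return
        fut.set_result(event_name)           # wakes up whoever is waiting on this event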
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1352.316253] env[62619]: DEBUG oslo_concurrency.lockutils [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] Acquiring lock "refresh_cache-e98120b4-7916-4ce4-88ef-0c904852bb1f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1352.318029] env[62619]: DEBUG oslo_concurrency.lockutils [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] Acquired lock "refresh_cache-e98120b4-7916-4ce4-88ef-0c904852bb1f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.318029] env[62619]: DEBUG nova.network.neutron [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Refreshing network info cache for port 0c0de74a-465b-4ba6-bc5c-860a32c90ff1 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1352.339012] env[62619]: DEBUG nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1352.341197] env[62619]: DEBUG nova.compute.provider_tree [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1352.350643] env[62619]: DEBUG nova.compute.manager [req-01530ba7-083a-4010-83e5-65fc77179a84 req-61842a2b-327c-4d08-be29-8825f1025832 service nova] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Received event network-vif-plugged-b80ccdea-ed2b-4257-8c43-ae663d8b8bbc {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1352.350643] env[62619]: DEBUG oslo_concurrency.lockutils [req-01530ba7-083a-4010-83e5-65fc77179a84 req-61842a2b-327c-4d08-be29-8825f1025832 service nova] Acquiring lock "4763e489-5aeb-4dc0-b327-b79a55afdfe3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.351013] env[62619]: DEBUG oslo_concurrency.lockutils [req-01530ba7-083a-4010-83e5-65fc77179a84 req-61842a2b-327c-4d08-be29-8825f1025832 service 
nova] Lock "4763e489-5aeb-4dc0-b327-b79a55afdfe3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.351206] env[62619]: DEBUG oslo_concurrency.lockutils [req-01530ba7-083a-4010-83e5-65fc77179a84 req-61842a2b-327c-4d08-be29-8825f1025832 service nova] Lock "4763e489-5aeb-4dc0-b327-b79a55afdfe3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.352198] env[62619]: DEBUG nova.compute.manager [req-01530ba7-083a-4010-83e5-65fc77179a84 req-61842a2b-327c-4d08-be29-8825f1025832 service nova] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] No waiting events found dispatching network-vif-plugged-b80ccdea-ed2b-4257-8c43-ae663d8b8bbc {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1352.352198] env[62619]: WARNING nova.compute.manager [req-01530ba7-083a-4010-83e5-65fc77179a84 req-61842a2b-327c-4d08-be29-8825f1025832 service nova] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Received unexpected event network-vif-plugged-b80ccdea-ed2b-4257-8c43-ae663d8b8bbc for instance with vm_state building and task_state spawning. [ 1352.367142] env[62619]: DEBUG nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1352.382979] env[62619]: INFO nova.compute.manager [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Took 19.80 seconds to build instance. [ 1352.406485] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.407551] env[62619]: DEBUG nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1352.415817] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777098, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.937607} completed successfully. 
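The two 409 responses with code "placement.concurrent_update" above are Placement's optimistic concurrency control: every inventory update carries the resource provider generation it was computed against, and if another writer bumped the generation first the PUT is rejected, after which the client refreshes inventories, aggregate associations and trait associations before trying again, which is exactly the refresh sequence that follows each conflict here. A schematic retry loop over a hypothetical placement client (get_provider/put_inventory are illustrative names, not the real REST client API):

    def update_inventory_with_retry(client, provider_uuid, build_inventory, max_attempts=5):
        """PUT inventory guarded by the provider generation; refresh and retry on 409."""
        for attempt in range(max_attempts):
            provider = client.get_provider(provider_uuid)        # includes 'generation'
            inventory = build_inventory(provider)                # desired VCPU/MEMORY_MB/DISK_GB
            status = client.put_inventory(provider_uuid,
                                          generation=provider["generation"],
                                          inventory=inventory)
            if status == 200:
                return True
            if status != 409:                                    # only generation conflicts retry
                raise RuntimeError("inventory update failed: %s" % status)
            # 409 placement.concurrent_update: someone else won the race; refresh and loop
        return False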
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.416219] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e98120b4-7916-4ce4-88ef-0c904852bb1f/e98120b4-7916-4ce4-88ef-0c904852bb1f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1352.416614] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1352.416971] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f74adbca-0c4e-466f-98ec-38c1af23d415 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.427380] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Waiting for the task: (returnval){ [ 1352.427380] env[62619]: value = "task-1777103" [ 1352.427380] env[62619]: _type = "Task" [ 1352.427380] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.440099] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777103, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.458923] env[62619]: DEBUG oslo_concurrency.lockutils [None req-42398909-7151-4da4-bd77-37df5c6a387b tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Lock "b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.035s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.596191] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Acquiring lock "f46de981-1f04-4baf-874c-de1b95d16f9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.596191] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Lock "f46de981-1f04-4baf-874c-de1b95d16f9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.635517] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777100, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.692295] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528aa538-a0b3-5165-b2fb-ee9480475361, 'name': SearchDatastore_Task, 'duration_secs': 0.077973} completed successfully. 
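The lockutils lines throughout this section follow a fixed template: "Acquiring lock X by Y", then "Lock X acquired by Y :: waited N s", and later 'Lock X "released" by Y :: held N s' (20.035 s here for the whole _locked_do_build_and_run_instance of b334cb41). That is simply a lock wrapper that timestamps the acquire attempt, the acquisition, and the release. A small context-manager sketch with the same three measurements, using only the standard library:

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock, name, owner):
        """Log waited/held durations around a lock, like the lockutils DEBUG lines."""
        t0 = time.monotonic()
        print('Acquiring lock "%s" by "%s"' % (name, owner))
        lock.acquire()
        t1 = time.monotonic()
        print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, t1 - t0))
        try:
            yield
        finally:
            lock.release()
            print('Lock "%s" "released" by "%s" :: held %.3fs'
                  % (name, owner, time.monotonic() - t1))

    # e.g. with timed_lock(threading.Lock(), "b334cb41-...", "_locked_do_build_and_run_instance"): ...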
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.692551] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1352.692861] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1352.693062] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1352.693231] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.693674] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1352.693956] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c464626-be2b-4cd0-a9c5-b05ad90cbb0d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.705574] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1352.705574] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1352.706666] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b58f188b-5d64-4de3-befc-a072a35d4410 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.712253] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1352.712253] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e71c1c-e724-9ac1-22b2-d0ccc6aa63a9" [ 1352.712253] env[62619]: _type = "Task" [ 1352.712253] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.723315] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e71c1c-e724-9ac1-22b2-d0ccc6aa63a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.754627] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63843f3-1714-413b-af22-f0075cb0ef99 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.763905] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9528a9-1d2b-4198-99d9-972e24fcb9b0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.807020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "refresh_cache-4763e489-5aeb-4dc0-b327-b79a55afdfe3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1352.807020] env[62619]: DEBUG nova.compute.manager [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Instance network_info: |[{"id": "b80ccdea-ed2b-4257-8c43-ae663d8b8bbc", "address": "fa:16:3e:2d:ca:43", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb80ccdea-ed", "ovs_interfaceid": "b80ccdea-ed2b-4257-8c43-ae663d8b8bbc", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1352.807260] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:ca:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b80ccdea-ed2b-4257-8c43-ae663d8b8bbc', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1352.814514] env[62619]: DEBUG oslo.service.loopingcall [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1352.815239] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e532eb-08dd-4355-b25f-f13614c2eab0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.820890] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1352.821171] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8521f8ed-b254-4b83-856a-b174fd4854cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.848426] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72796596-fd1c-4871-8e17-14cf848a5d81 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.853866] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1352.853866] env[62619]: value = "task-1777104" [ 1352.853866] env[62619]: _type = "Task" [ 1352.853866] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.865839] env[62619]: DEBUG nova.compute.provider_tree [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1352.872466] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777104, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.886056] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39febed6-149d-4385-9524-8ebab654e006 tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Lock "aa576459-65bf-4b16-ad1d-0930497522eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.311s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.941703] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777103, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.147182} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.941997] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1352.942926] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de918a7-91eb-41c6-8881-58c00e85558a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.962961] env[62619]: DEBUG nova.compute.manager [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1352.975314] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] e98120b4-7916-4ce4-88ef-0c904852bb1f/e98120b4-7916-4ce4-88ef-0c904852bb1f.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1352.979247] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f0cc7f8-9485-48bc-9875-bcc1fc13b891 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.999787] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Waiting for the task: (returnval){ [ 1352.999787] env[62619]: value = "task-1777105" [ 1352.999787] env[62619]: _type = "Task" [ 1352.999787] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.009471] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777105, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.136199] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777100, 'name': ReconfigVM_Task, 'duration_secs': 1.038076} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.136878] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 4f08d36b-f26e-499e-a4be-d8cbb481a44d/4f08d36b-f26e-499e-a4be-d8cbb481a44d.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1353.137888] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-089c7900-423f-479b-9380-40bae95421c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.150861] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Waiting for the task: (returnval){ [ 1353.150861] env[62619]: value = "task-1777106" [ 1353.150861] env[62619]: _type = "Task" [ 1353.150861] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.162249] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777106, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.224739] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e71c1c-e724-9ac1-22b2-d0ccc6aa63a9, 'name': SearchDatastore_Task, 'duration_secs': 0.02177} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.224739] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59b84e3a-a200-40c2-81a7-f85bac18af68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.230363] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1353.230363] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e6a95c-8828-2387-65a2-205cec54507c" [ 1353.230363] env[62619]: _type = "Task" [ 1353.230363] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.239698] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e6a95c-8828-2387-65a2-205cec54507c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.329271] env[62619]: DEBUG nova.network.neutron [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Successfully updated port: 9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1353.387974] env[62619]: DEBUG nova.compute.manager [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1353.392392] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777104, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.444141] env[62619]: DEBUG nova.scheduler.client.report [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 19 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1353.444141] env[62619]: DEBUG nova.compute.provider_tree [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 19 to 20 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1353.444141] env[62619]: DEBUG nova.compute.provider_tree [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1353.522925] env[62619]: DEBUG oslo_vmware.api 
[None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777105, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.552158] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.662275] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777106, 'name': Rename_Task, 'duration_secs': 0.247895} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.663925] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1353.664661] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7e5863b-9280-4682-a8ae-34c851f066f7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.672161] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Waiting for the task: (returnval){ [ 1353.672161] env[62619]: value = "task-1777107" [ 1353.672161] env[62619]: _type = "Task" [ 1353.672161] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.682744] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777107, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.743974] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e6a95c-8828-2387-65a2-205cec54507c, 'name': SearchDatastore_Task, 'duration_secs': 0.015116} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.743974] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1353.743974] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ac03bcf3-61df-4557-8018-0ad54ef30f17/ac03bcf3-61df-4557-8018-0ad54ef30f17.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1353.744791] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b8b69ef-0e0e-40cd-ab09-fc1b2b77029d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.751502] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1353.751502] env[62619]: value = "task-1777108" [ 1353.751502] env[62619]: _type = "Task" [ 1353.751502] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.752430] env[62619]: DEBUG nova.network.neutron [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Updated VIF entry in instance network info cache for port 0c0de74a-465b-4ba6-bc5c-860a32c90ff1. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1353.752746] env[62619]: DEBUG nova.network.neutron [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Updating instance_info_cache with network_info: [{"id": "0c0de74a-465b-4ba6-bc5c-860a32c90ff1", "address": "fa:16:3e:62:5d:92", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.83", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c0de74a-46", "ovs_interfaceid": "0c0de74a-465b-4ba6-bc5c-860a32c90ff1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.764740] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777108, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.838365] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Acquiring lock "refresh_cache-28a8485c-fc0d-4fd0-8be9-37c49caf89b8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.838365] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Acquired lock "refresh_cache-28a8485c-fc0d-4fd0-8be9-37c49caf89b8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.838365] env[62619]: DEBUG nova.network.neutron [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1353.872227] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777104, 'name': CreateVM_Task, 'duration_secs': 0.537724} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.872302] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1353.873694] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.873694] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.873694] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1353.873694] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75cf340e-24c2-4922-b823-008b9755e831 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.878666] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1353.878666] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521cc1cc-e4ee-d77f-5128-0b1d973e3409" [ 1353.878666] env[62619]: _type = "Task" [ 1353.878666] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.889986] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521cc1cc-e4ee-d77f-5128-0b1d973e3409, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.927946] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.949884] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.186s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.950526] env[62619]: DEBUG nova.compute.manager [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1353.957558] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 9.738s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.019273] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777105, 'name': ReconfigVM_Task, 'duration_secs': 0.581593} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.019273] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Reconfigured VM instance instance-00000005 to attach disk [datastore1] e98120b4-7916-4ce4-88ef-0c904852bb1f/e98120b4-7916-4ce4-88ef-0c904852bb1f.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1354.019896] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6771d824-b0d0-45e8-bd6b-4511a19bdc73 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.028362] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Waiting for the task: (returnval){ [ 1354.028362] env[62619]: value = "task-1777109" [ 1354.028362] env[62619]: _type = "Task" [ 1354.028362] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.043460] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777109, 'name': Rename_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.193023] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777107, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.260377] env[62619]: DEBUG oslo_concurrency.lockutils [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] Releasing lock "refresh_cache-e98120b4-7916-4ce4-88ef-0c904852bb1f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.260701] env[62619]: DEBUG nova.compute.manager [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Received event network-vif-plugged-b6faf342-2332-4eee-bdde-dafce4f0a856 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1354.260833] env[62619]: DEBUG oslo_concurrency.lockutils [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] Acquiring lock "ac03bcf3-61df-4557-8018-0ad54ef30f17-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.261164] env[62619]: DEBUG oslo_concurrency.lockutils [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] Lock "ac03bcf3-61df-4557-8018-0ad54ef30f17-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.261431] env[62619]: DEBUG oslo_concurrency.lockutils [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] Lock "ac03bcf3-61df-4557-8018-0ad54ef30f17-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1354.261623] env[62619]: DEBUG nova.compute.manager [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] No waiting events found dispatching network-vif-plugged-b6faf342-2332-4eee-bdde-dafce4f0a856 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1354.261804] env[62619]: WARNING nova.compute.manager [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Received unexpected event network-vif-plugged-b6faf342-2332-4eee-bdde-dafce4f0a856 for instance with vm_state building and task_state spawning. 
[ 1354.261979] env[62619]: DEBUG nova.compute.manager [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Received event network-changed-b6faf342-2332-4eee-bdde-dafce4f0a856 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1354.262237] env[62619]: DEBUG nova.compute.manager [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Refreshing instance network info cache due to event network-changed-b6faf342-2332-4eee-bdde-dafce4f0a856. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1354.262517] env[62619]: DEBUG oslo_concurrency.lockutils [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] Acquiring lock "refresh_cache-ac03bcf3-61df-4557-8018-0ad54ef30f17" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1354.262594] env[62619]: DEBUG oslo_concurrency.lockutils [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] Acquired lock "refresh_cache-ac03bcf3-61df-4557-8018-0ad54ef30f17" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.262721] env[62619]: DEBUG nova.network.neutron [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Refreshing network info cache for port b6faf342-2332-4eee-bdde-dafce4f0a856 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1354.272283] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777108, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.389373] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521cc1cc-e4ee-d77f-5128-0b1d973e3409, 'name': SearchDatastore_Task, 'duration_secs': 0.012429} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.389911] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.390265] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1354.390949] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1354.391369] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.391666] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1354.392084] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26e2c388-209c-45ad-8915-dd3ddc22036e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.409180] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1354.410130] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1354.411860] env[62619]: DEBUG nova.network.neutron [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1354.413838] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cdb9278-28ce-46f1-8890-7f20dd256d07 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.427184] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1354.427184] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5290163f-35cd-d0a5-edb8-917f2dc31d64" [ 1354.427184] env[62619]: _type = "Task" [ 1354.427184] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.440307] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5290163f-35cd-d0a5-edb8-917f2dc31d64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.462992] env[62619]: DEBUG nova.compute.utils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1354.468956] env[62619]: DEBUG nova.compute.manager [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1354.469194] env[62619]: DEBUG nova.network.neutron [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1354.540427] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777109, 'name': Rename_Task, 'duration_secs': 0.235598} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.540427] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1354.540427] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b66fa1d2-bb5d-4532-9509-5bf510c2de00 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.547372] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Waiting for the task: (returnval){ [ 1354.547372] env[62619]: value = "task-1777110" [ 1354.547372] env[62619]: _type = "Task" [ 1354.547372] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.556329] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777110, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.683653] env[62619]: DEBUG oslo_vmware.api [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777107, 'name': PowerOnVM_Task, 'duration_secs': 0.871543} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.684415] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1354.684415] env[62619]: INFO nova.compute.manager [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Took 13.14 seconds to spawn the instance on the hypervisor. 
[ 1354.684415] env[62619]: DEBUG nova.compute.manager [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1354.685105] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9994e6a6-a553-4e0b-bc25-822de69151c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.736405] env[62619]: DEBUG nova.policy [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '687ed3ed66ba46b4be99fe8db56e4088', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12de0928b20e47a3a5de58d191ffcf5a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1354.764044] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777108, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.590679} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.765812] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ac03bcf3-61df-4557-8018-0ad54ef30f17/ac03bcf3-61df-4557-8018-0ad54ef30f17.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1354.765812] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1354.766699] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3facd2fd-2f6c-4aed-8be7-53345bcddf19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.774325] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1354.774325] env[62619]: value = "task-1777111" [ 1354.774325] env[62619]: _type = "Task" [ 1354.774325] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.783190] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777111, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.925932] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e65911d6-729a-433c-8d9b-60ccf2cef4f6 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "08c1fef9-40fc-4420-91de-fe911dea70f7" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.926250] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e65911d6-729a-433c-8d9b-60ccf2cef4f6 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "08c1fef9-40fc-4420-91de-fe911dea70f7" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.926501] env[62619]: DEBUG nova.compute.manager [None req-e65911d6-729a-433c-8d9b-60ccf2cef4f6 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1354.927432] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc18183-644d-4543-9bf9-cc71645cec8f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.945531] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5290163f-35cd-d0a5-edb8-917f2dc31d64, 'name': SearchDatastore_Task, 'duration_secs': 0.017147} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.945531] env[62619]: DEBUG nova.compute.manager [None req-e65911d6-729a-433c-8d9b-60ccf2cef4f6 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1354.945531] env[62619]: DEBUG nova.objects.instance [None req-e65911d6-729a-433c-8d9b-60ccf2cef4f6 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lazy-loading 'flavor' on Instance uuid 08c1fef9-40fc-4420-91de-fe911dea70f7 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1354.946771] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f7f4e47-42e1-49d0-b314-70988e4c0bb6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.958019] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1354.958019] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52eb78ad-4bea-f763-b804-8dc1c46383ae" [ 1354.958019] env[62619]: _type = "Task" [ 1354.958019] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.965282] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52eb78ad-4bea-f763-b804-8dc1c46383ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.973314] env[62619]: DEBUG nova.compute.manager [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1355.004076] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 08c1fef9-40fc-4420-91de-fe911dea70f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1355.004076] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance aa576459-65bf-4b16-ad1d-0930497522eb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1355.004076] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance b334cb41-5ddf-4545-8e2a-97c4d1de7cbf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1355.004076] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4f08d36b-f26e-499e-a4be-d8cbb481a44d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1355.004291] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e98120b4-7916-4ce4-88ef-0c904852bb1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1355.004291] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance ac03bcf3-61df-4557-8018-0ad54ef30f17 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1355.004291] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4763e489-5aeb-4dc0-b327-b79a55afdfe3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1355.004291] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 28a8485c-fc0d-4fd0-8be9-37c49caf89b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1355.004402] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e0e25ddd-3692-480f-bfa0-212741c0d882 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1355.028864] env[62619]: DEBUG nova.network.neutron [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Updating instance_info_cache with network_info: [{"id": "9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff", "address": "fa:16:3e:a1:c2:90", "network": {"id": "94fc632f-d96f-4eaf-8938-610545cddd66", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1236570726-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "2ef3f0ff9d6b41b2b73a23548c9bdf5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f1844c8-15", "ovs_interfaceid": "9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1355.035768] env[62619]: DEBUG nova.network.neutron [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Updated VIF entry in instance network info cache for port b6faf342-2332-4eee-bdde-dafce4f0a856. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1355.037045] env[62619]: DEBUG nova.network.neutron [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Updating instance_info_cache with network_info: [{"id": "b6faf342-2332-4eee-bdde-dafce4f0a856", "address": "fa:16:3e:b8:19:39", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6faf342-23", "ovs_interfaceid": "b6faf342-2332-4eee-bdde-dafce4f0a856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1355.064658] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777110, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.204869] env[62619]: INFO nova.compute.manager [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Took 21.37 seconds to build instance. [ 1355.287774] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777111, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120798} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.287774] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1355.289138] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97530e8b-626f-4943-83b9-bd618a0e66f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.319720] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] ac03bcf3-61df-4557-8018-0ad54ef30f17/ac03bcf3-61df-4557-8018-0ad54ef30f17.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1355.319720] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec04f195-a825-4898-9751-b6674be72576 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.348111] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1355.348111] env[62619]: value = "task-1777112" [ 1355.348111] env[62619]: _type = "Task" [ 1355.348111] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.358768] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777112, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.473939] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52eb78ad-4bea-f763-b804-8dc1c46383ae, 'name': SearchDatastore_Task, 'duration_secs': 0.033813} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.475052] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.475052] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4763e489-5aeb-4dc0-b327-b79a55afdfe3/4763e489-5aeb-4dc0-b327-b79a55afdfe3.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1355.475222] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7dbfa955-3d2a-4662-b034-461983393b9d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.486620] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1355.486620] env[62619]: value = "task-1777113" [ 1355.486620] env[62619]: _type = "Task" [ 1355.486620] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.501262] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777113, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.509271] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance aa4906f1-e801-4df0-819e-8c5fb5930fb5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1355.534440] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Releasing lock "refresh_cache-28a8485c-fc0d-4fd0-8be9-37c49caf89b8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.534757] env[62619]: DEBUG nova.compute.manager [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Instance network_info: |[{"id": "9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff", "address": "fa:16:3e:a1:c2:90", "network": {"id": "94fc632f-d96f-4eaf-8938-610545cddd66", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1236570726-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "2ef3f0ff9d6b41b2b73a23548c9bdf5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f1844c8-15", "ovs_interfaceid": "9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1355.535726] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:c2:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1355.543450] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Creating folder: Project (2ef3f0ff9d6b41b2b73a23548c9bdf5b). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1355.545155] env[62619]: DEBUG oslo_concurrency.lockutils [req-30e09578-3e35-44a3-a151-61fd49fcb1bb req-6da2628c-cd2a-4277-a489-e039ddf42a3f service nova] Releasing lock "refresh_cache-ac03bcf3-61df-4557-8018-0ad54ef30f17" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.545514] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-56003e04-ce50-4594-b0ad-29954c68a633 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.555433] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Created folder: Project (2ef3f0ff9d6b41b2b73a23548c9bdf5b) in parent group-v368875. [ 1355.555648] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Creating folder: Instances. Parent ref: group-v368895. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1355.559022] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6e7115e-43ca-4847-891e-b12112c3801a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.560659] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777110, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.567592] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Created folder: Instances in parent group-v368895. [ 1355.567863] env[62619]: DEBUG oslo.service.loopingcall [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1355.568141] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1355.568280] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-77340c0e-eb1c-49d5-81ea-802d6f99ab1b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.586770] env[62619]: DEBUG nova.network.neutron [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Successfully created port: 149b04f5-c179-4c37-9c3d-7293f5342eca {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1355.595941] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1355.595941] env[62619]: value = "task-1777116" [ 1355.595941] env[62619]: _type = "Task" [ 1355.595941] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.609369] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777116, 'name': CreateVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.710199] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c527952-42ea-4a21-b893-68125e26e960 tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Lock "4f08d36b-f26e-499e-a4be-d8cbb481a44d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.887s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.859669] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777112, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.962018] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65911d6-729a-433c-8d9b-60ccf2cef4f6 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1355.962018] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82aa4481-2569-42da-a3ce-25c977740b32 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.970941] env[62619]: DEBUG oslo_vmware.api [None req-e65911d6-729a-433c-8d9b-60ccf2cef4f6 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1355.970941] env[62619]: value = "task-1777117" [ 1355.970941] env[62619]: _type = "Task" [ 1355.970941] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.986589] env[62619]: DEBUG oslo_vmware.api [None req-e65911d6-729a-433c-8d9b-60ccf2cef4f6 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777117, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.987894] env[62619]: DEBUG nova.compute.manager [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1356.001580] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777113, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.014954] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance ed34ae20-a891-45aa-8124-f36f264937f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1356.027781] env[62619]: DEBUG nova.virt.hardware [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1356.028603] env[62619]: DEBUG nova.virt.hardware [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1356.028875] env[62619]: DEBUG nova.virt.hardware [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1356.029252] env[62619]: DEBUG nova.virt.hardware [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d 
tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1356.029417] env[62619]: DEBUG nova.virt.hardware [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1356.029616] env[62619]: DEBUG nova.virt.hardware [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1356.029962] env[62619]: DEBUG nova.virt.hardware [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1356.030045] env[62619]: DEBUG nova.virt.hardware [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1356.030232] env[62619]: DEBUG nova.virt.hardware [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1356.030405] env[62619]: DEBUG nova.virt.hardware [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1356.030591] env[62619]: DEBUG nova.virt.hardware [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1356.031696] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cad8da8-4345-4156-ba41-a2998d07f1a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.045276] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749a9417-cd96-477b-8eb1-09c50e47b358 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.058722] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777110, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.112342] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777116, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.213742] env[62619]: DEBUG nova.compute.manager [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1356.359251] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777112, 'name': ReconfigVM_Task, 'duration_secs': 0.774642} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.359620] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Reconfigured VM instance instance-00000006 to attach disk [datastore1] ac03bcf3-61df-4557-8018-0ad54ef30f17/ac03bcf3-61df-4557-8018-0ad54ef30f17.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1356.360268] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7408a7c1-a5b8-4941-aad9-91da640e9e60 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.370765] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1356.370765] env[62619]: value = "task-1777118" [ 1356.370765] env[62619]: _type = "Task" [ 1356.370765] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.388217] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777118, 'name': Rename_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.462265] env[62619]: DEBUG nova.compute.manager [req-82d84752-86a2-4d21-8396-f639e6d02e20 req-5854b7ce-190b-427f-8820-a33e994504cb service nova] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Received event network-vif-plugged-9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1356.462483] env[62619]: DEBUG oslo_concurrency.lockutils [req-82d84752-86a2-4d21-8396-f639e6d02e20 req-5854b7ce-190b-427f-8820-a33e994504cb service nova] Acquiring lock "28a8485c-fc0d-4fd0-8be9-37c49caf89b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.462693] env[62619]: DEBUG oslo_concurrency.lockutils [req-82d84752-86a2-4d21-8396-f639e6d02e20 req-5854b7ce-190b-427f-8820-a33e994504cb service nova] Lock "28a8485c-fc0d-4fd0-8be9-37c49caf89b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.462838] env[62619]: DEBUG oslo_concurrency.lockutils [req-82d84752-86a2-4d21-8396-f639e6d02e20 req-5854b7ce-190b-427f-8820-a33e994504cb service nova] Lock "28a8485c-fc0d-4fd0-8be9-37c49caf89b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.463047] env[62619]: DEBUG nova.compute.manager [req-82d84752-86a2-4d21-8396-f639e6d02e20 req-5854b7ce-190b-427f-8820-a33e994504cb service nova] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] No waiting events found dispatching network-vif-plugged-9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1356.463176] env[62619]: WARNING nova.compute.manager [req-82d84752-86a2-4d21-8396-f639e6d02e20 req-5854b7ce-190b-427f-8820-a33e994504cb service nova] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Received unexpected event network-vif-plugged-9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff for instance with vm_state building and task_state spawning. [ 1356.463362] env[62619]: DEBUG nova.compute.manager [req-82d84752-86a2-4d21-8396-f639e6d02e20 req-5854b7ce-190b-427f-8820-a33e994504cb service nova] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Received event network-changed-9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1356.463659] env[62619]: DEBUG nova.compute.manager [req-82d84752-86a2-4d21-8396-f639e6d02e20 req-5854b7ce-190b-427f-8820-a33e994504cb service nova] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Refreshing instance network info cache due to event network-changed-9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1356.464109] env[62619]: DEBUG oslo_concurrency.lockutils [req-82d84752-86a2-4d21-8396-f639e6d02e20 req-5854b7ce-190b-427f-8820-a33e994504cb service nova] Acquiring lock "refresh_cache-28a8485c-fc0d-4fd0-8be9-37c49caf89b8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1356.464109] env[62619]: DEBUG oslo_concurrency.lockutils [req-82d84752-86a2-4d21-8396-f639e6d02e20 req-5854b7ce-190b-427f-8820-a33e994504cb service nova] Acquired lock "refresh_cache-28a8485c-fc0d-4fd0-8be9-37c49caf89b8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.464109] env[62619]: DEBUG nova.network.neutron [req-82d84752-86a2-4d21-8396-f639e6d02e20 req-5854b7ce-190b-427f-8820-a33e994504cb service nova] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Refreshing network info cache for port 9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1356.484530] env[62619]: DEBUG oslo_vmware.api [None req-e65911d6-729a-433c-8d9b-60ccf2cef4f6 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777117, 'name': PowerOffVM_Task, 'duration_secs': 0.242213} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.485686] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65911d6-729a-433c-8d9b-60ccf2cef4f6 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1356.485881] env[62619]: DEBUG nova.compute.manager [None req-e65911d6-729a-433c-8d9b-60ccf2cef4f6 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1356.487152] env[62619]: DEBUG nova.compute.manager [req-df9751bb-ba36-40e7-ad86-11c569103bde req-365b11a1-e0eb-4869-861a-e71257d95f39 service nova] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Received event network-changed-b80ccdea-ed2b-4257-8c43-ae663d8b8bbc {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1356.488345] env[62619]: DEBUG nova.compute.manager [req-df9751bb-ba36-40e7-ad86-11c569103bde req-365b11a1-e0eb-4869-861a-e71257d95f39 service nova] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Refreshing instance network info cache due to event network-changed-b80ccdea-ed2b-4257-8c43-ae663d8b8bbc. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1356.488572] env[62619]: DEBUG oslo_concurrency.lockutils [req-df9751bb-ba36-40e7-ad86-11c569103bde req-365b11a1-e0eb-4869-861a-e71257d95f39 service nova] Acquiring lock "refresh_cache-4763e489-5aeb-4dc0-b327-b79a55afdfe3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1356.488710] env[62619]: DEBUG oslo_concurrency.lockutils [req-df9751bb-ba36-40e7-ad86-11c569103bde req-365b11a1-e0eb-4869-861a-e71257d95f39 service nova] Acquired lock "refresh_cache-4763e489-5aeb-4dc0-b327-b79a55afdfe3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.488871] env[62619]: DEBUG nova.network.neutron [req-df9751bb-ba36-40e7-ad86-11c569103bde req-365b11a1-e0eb-4869-861a-e71257d95f39 service nova] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Refreshing network info cache for port b80ccdea-ed2b-4257-8c43-ae663d8b8bbc {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1356.490882] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69485f1c-08cb-4801-bf63-37aa3a70b8a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.510482] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777113, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.749781} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.510933] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4763e489-5aeb-4dc0-b327-b79a55afdfe3/4763e489-5aeb-4dc0-b327-b79a55afdfe3.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1356.514251] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1356.514789] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cdfaae47-4447-41b2-b770-52eb8278bb4a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.520355] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 1847c5d8-16eb-4feb-8a09-24ad6728e59c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1356.525018] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1356.525018] env[62619]: value = "task-1777119" [ 1356.525018] env[62619]: _type = "Task" [ 1356.525018] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.536932] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777119, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.562401] env[62619]: DEBUG oslo_vmware.api [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777110, 'name': PowerOnVM_Task, 'duration_secs': 1.608655} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.563133] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1356.563133] env[62619]: INFO nova.compute.manager [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Took 12.68 seconds to spawn the instance on the hypervisor. [ 1356.563133] env[62619]: DEBUG nova.compute.manager [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1356.563985] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939f22c4-e133-45d1-8411-c31172b47e94 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.617726] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777116, 'name': CreateVM_Task, 'duration_secs': 0.603775} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.617854] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1356.619326] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1356.619326] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.619326] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1356.619326] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85f0010a-a484-4be9-8359-f8aa09512717 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.624317] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Waiting for the task: (returnval){ [ 1356.624317] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52da3dd2-6494-6fb0-f320-63e5cb7b2491" [ 1356.624317] env[62619]: _type = "Task" [ 1356.624317] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.641213] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52da3dd2-6494-6fb0-f320-63e5cb7b2491, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.754705] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.809550] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "d7b2d831-b2ae-445c-887b-290171ae5d80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.809757] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "d7b2d831-b2ae-445c-887b-290171ae5d80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.843038] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "312aed5b-a66e-4428-ac1b-483dc2b38291" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.843333] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "312aed5b-a66e-4428-ac1b-483dc2b38291" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.872883] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "e9ca5148-f188-4a15-83ae-8f3d730b0dab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.873155] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "e9ca5148-f188-4a15-83ae-8f3d730b0dab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.882618] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 
tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777118, 'name': Rename_Task, 'duration_secs': 0.301858} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.884158] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1356.884158] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8770b97-5557-4515-b849-31977d173020 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.889311] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1356.889311] env[62619]: value = "task-1777120" [ 1356.889311] env[62619]: _type = "Task" [ 1356.889311] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.897459] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777120, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.016477] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e65911d6-729a-433c-8d9b-60ccf2cef4f6 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "08c1fef9-40fc-4420-91de-fe911dea70f7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.090s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.024889] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance fb231b38-950e-4c86-bfe5-4c10a304910f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1357.043768] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777119, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.205019} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.043768] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1357.043768] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51a46e1-e2d6-43fd-9ed1-56ab44f2f7f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.071718] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 4763e489-5aeb-4dc0-b327-b79a55afdfe3/4763e489-5aeb-4dc0-b327-b79a55afdfe3.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1357.072407] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a14628c2-5a34-4c3b-88ef-f0e41bb2ba5f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.097861] env[62619]: INFO nova.compute.manager [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Took 23.14 seconds to build instance. [ 1357.101018] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1357.101018] env[62619]: value = "task-1777121" [ 1357.101018] env[62619]: _type = "Task" [ 1357.101018] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.110840] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777121, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.140848] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52da3dd2-6494-6fb0-f320-63e5cb7b2491, 'name': SearchDatastore_Task, 'duration_secs': 0.02054} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.141217] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1357.141458] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1357.141784] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1357.141881] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.142097] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1357.142784] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbd8aea2-2915-491e-b9d0-ea69d53c41a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.153022] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1357.153226] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1357.156680] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e07f0b9f-7067-45d8-8a17-355926aab952 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.161639] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Waiting for the task: (returnval){ [ 1357.161639] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c83d79-6f1c-5f45-cf9f-c69280f562a7" [ 1357.161639] env[62619]: _type = "Task" [ 1357.161639] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.172223] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c83d79-6f1c-5f45-cf9f-c69280f562a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.376530] env[62619]: DEBUG nova.network.neutron [req-82d84752-86a2-4d21-8396-f639e6d02e20 req-5854b7ce-190b-427f-8820-a33e994504cb service nova] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Updated VIF entry in instance network info cache for port 9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1357.376530] env[62619]: DEBUG nova.network.neutron [req-82d84752-86a2-4d21-8396-f639e6d02e20 req-5854b7ce-190b-427f-8820-a33e994504cb service nova] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Updating instance_info_cache with network_info: [{"id": "9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff", "address": "fa:16:3e:a1:c2:90", "network": {"id": "94fc632f-d96f-4eaf-8938-610545cddd66", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1236570726-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ef3f0ff9d6b41b2b73a23548c9bdf5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f1844c8-15", "ovs_interfaceid": "9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1357.404268] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777120, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.532575] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4ee81568-ad9a-4ded-b6fe-15503d85968e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1357.600931] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e650642-4074-43f1-a7c4-275f23c78188 tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Lock "e98120b4-7916-4ce4-88ef-0c904852bb1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.658s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.615966] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777121, 'name': ReconfigVM_Task, 'duration_secs': 0.45713} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.618419] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 4763e489-5aeb-4dc0-b327-b79a55afdfe3/4763e489-5aeb-4dc0-b327-b79a55afdfe3.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1357.618419] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4dcaa448-b806-484f-8fc6-f73fef57951f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.624784] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1357.624784] env[62619]: value = "task-1777122" [ 1357.624784] env[62619]: _type = "Task" [ 1357.624784] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.636217] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777122, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.672092] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c83d79-6f1c-5f45-cf9f-c69280f562a7, 'name': SearchDatastore_Task, 'duration_secs': 0.018881} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.673021] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79b82221-a5dd-48c7-9a42-ce619d182309 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.678430] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Waiting for the task: (returnval){ [ 1357.678430] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b6ea6e-5eaf-6fc2-4059-5c26a4998dee" [ 1357.678430] env[62619]: _type = "Task" [ 1357.678430] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.687119] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b6ea6e-5eaf-6fc2-4059-5c26a4998dee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.787565] env[62619]: DEBUG nova.network.neutron [req-df9751bb-ba36-40e7-ad86-11c569103bde req-365b11a1-e0eb-4869-861a-e71257d95f39 service nova] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Updated VIF entry in instance network info cache for port b80ccdea-ed2b-4257-8c43-ae663d8b8bbc. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1357.787921] env[62619]: DEBUG nova.network.neutron [req-df9751bb-ba36-40e7-ad86-11c569103bde req-365b11a1-e0eb-4869-861a-e71257d95f39 service nova] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Updating instance_info_cache with network_info: [{"id": "b80ccdea-ed2b-4257-8c43-ae663d8b8bbc", "address": "fa:16:3e:2d:ca:43", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb80ccdea-ed", "ovs_interfaceid": "b80ccdea-ed2b-4257-8c43-ae663d8b8bbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1357.879498] env[62619]: DEBUG oslo_concurrency.lockutils [req-82d84752-86a2-4d21-8396-f639e6d02e20 req-5854b7ce-190b-427f-8820-a33e994504cb service nova] Releasing lock "refresh_cache-28a8485c-fc0d-4fd0-8be9-37c49caf89b8" {{(pid=62619) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1357.903440] env[62619]: DEBUG oslo_vmware.api [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777120, 'name': PowerOnVM_Task, 'duration_secs': 0.744915} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.904198] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1357.905718] env[62619]: INFO nova.compute.manager [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Took 11.72 seconds to spawn the instance on the hypervisor. [ 1357.906325] env[62619]: DEBUG nova.compute.manager [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1357.907947] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ed9388-64c1-498f-885c-6486c6322871 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.969034] env[62619]: DEBUG nova.network.neutron [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Successfully updated port: 149b04f5-c179-4c37-9c3d-7293f5342eca {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1358.037824] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance f46de981-1f04-4baf-874c-de1b95d16f9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1358.038089] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1358.038239] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1358.111568] env[62619]: DEBUG nova.compute.manager [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1358.142783] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777122, 'name': Rename_Task, 'duration_secs': 0.204984} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.147268] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1358.148879] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b59ba632-cafc-4aa8-8a75-cf5bddac7889 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.158166] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1358.158166] env[62619]: value = "task-1777123" [ 1358.158166] env[62619]: _type = "Task" [ 1358.158166] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.168483] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777123, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.197163] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b6ea6e-5eaf-6fc2-4059-5c26a4998dee, 'name': SearchDatastore_Task, 'duration_secs': 0.016939} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.197282] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1358.197931] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 28a8485c-fc0d-4fd0-8be9-37c49caf89b8/28a8485c-fc0d-4fd0-8be9-37c49caf89b8.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1358.197931] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1b428d9-7534-405a-a05d-c9f04849715c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.205487] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Waiting for the task: (returnval){ [ 1358.205487] env[62619]: value = "task-1777124" [ 1358.205487] env[62619]: _type = "Task" [ 1358.205487] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.215022] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777124, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.292897] env[62619]: DEBUG oslo_concurrency.lockutils [req-df9751bb-ba36-40e7-ad86-11c569103bde req-365b11a1-e0eb-4869-861a-e71257d95f39 service nova] Releasing lock "refresh_cache-4763e489-5aeb-4dc0-b327-b79a55afdfe3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1358.387235] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e021462-a535-435a-a42b-d47017402821 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.396530] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95450922-adfc-40b4-8801-daa1dbf7fb84 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.443231] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1836769e-fd8b-464d-9e07-5ebbff02e0f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.446404] env[62619]: INFO nova.compute.manager [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Took 21.99 seconds to build instance. [ 1358.453303] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b967b9-4932-4393-bf53-083624feb233 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.472121] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1358.475415] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Acquiring lock "refresh_cache-e0e25ddd-3692-480f-bfa0-212741c0d882" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1358.475551] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Acquired lock "refresh_cache-e0e25ddd-3692-480f-bfa0-212741c0d882" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1358.475697] env[62619]: DEBUG nova.network.neutron [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1358.644133] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1358.669213] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777123, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.696592] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquiring lock "d16bebd1-a144-4d73-8eb6-8ab12a08fe69" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1358.696823] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Lock "d16bebd1-a144-4d73-8eb6-8ab12a08fe69" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1358.715327] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777124, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.743289] env[62619]: DEBUG nova.compute.manager [None req-a7915f1b-2324-49b3-aaad-2972b194bf40 tempest-ServerDiagnosticsTest-356046188 tempest-ServerDiagnosticsTest-356046188-project-admin] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1358.745202] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e50666e-0110-4960-899b-0fb2963edadd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.752425] env[62619]: INFO nova.compute.manager [None req-a7915f1b-2324-49b3-aaad-2972b194bf40 tempest-ServerDiagnosticsTest-356046188 tempest-ServerDiagnosticsTest-356046188-project-admin] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Retrieving diagnostics [ 1358.753650] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9ca04d-b9ea-46e9-b799-f2fa7a36fe5f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.791473] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquiring lock "e81c03f7-9c0e-46bd-9641-aced82038eca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1358.792227] env[62619]: DEBUG oslo_concurrency.lockutils [None 
req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Lock "e81c03f7-9c0e-46bd-9641-aced82038eca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1358.852368] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Acquiring lock "aa576459-65bf-4b16-ad1d-0930497522eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1358.852664] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Lock "aa576459-65bf-4b16-ad1d-0930497522eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1358.853172] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Acquiring lock "aa576459-65bf-4b16-ad1d-0930497522eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1358.853172] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Lock "aa576459-65bf-4b16-ad1d-0930497522eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1358.853296] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Lock "aa576459-65bf-4b16-ad1d-0930497522eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1358.855295] env[62619]: INFO nova.compute.manager [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Terminating instance [ 1358.949323] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0c0fdfef-0953-48a5-893b-a6a6d0c5db0d tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "ac03bcf3-61df-4557-8018-0ad54ef30f17" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.502s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1358.976771] env[62619]: DEBUG 
nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1359.017570] env[62619]: DEBUG nova.network.neutron [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1359.019553] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Acquiring lock "b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.019553] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Lock "b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.020266] env[62619]: INFO nova.compute.manager [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Rebooting instance [ 1359.173744] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777123, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.215950] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777124, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.263711] env[62619]: DEBUG nova.network.neutron [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Updating instance_info_cache with network_info: [{"id": "149b04f5-c179-4c37-9c3d-7293f5342eca", "address": "fa:16:3e:8a:0d:12", "network": {"id": "0ffdf557-31ac-4564-bd69-246c0a1e9f3a", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1534333533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12de0928b20e47a3a5de58d191ffcf5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap149b04f5-c1", "ovs_interfaceid": "149b04f5-c179-4c37-9c3d-7293f5342eca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.360871] env[62619]: DEBUG nova.compute.manager [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1359.365147] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1359.365147] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0238e1-b743-431f-9792-b18b9b833224 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.373406] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1359.373706] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a19abddf-77d2-4207-9197-b5c0f07af771 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.381322] env[62619]: DEBUG oslo_vmware.api [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Waiting for the task: (returnval){ [ 1359.381322] env[62619]: value = "task-1777125" [ 1359.381322] env[62619]: _type = "Task" [ 1359.381322] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.390303] env[62619]: DEBUG oslo_vmware.api [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Task: {'id': task-1777125, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.455026] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1359.486668] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1359.487915] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.529s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1359.487915] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.706s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.488720] env[62619]: INFO nova.compute.claims [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1359.568355] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Acquiring lock "refresh_cache-b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.568355] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Acquired lock "refresh_cache-b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.568355] env[62619]: DEBUG nova.network.neutron [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1359.671447] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777123, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.719142] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777124, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.044474} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.719634] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 28a8485c-fc0d-4fd0-8be9-37c49caf89b8/28a8485c-fc0d-4fd0-8be9-37c49caf89b8.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1359.719961] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1359.720580] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-288fb5f1-9b51-4682-bad0-2bcf2588b845 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.727286] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Waiting for the task: (returnval){ [ 1359.727286] env[62619]: value = "task-1777126" [ 1359.727286] env[62619]: _type = "Task" [ 1359.727286] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.737820] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777126, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.767667] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Releasing lock "refresh_cache-e0e25ddd-3692-480f-bfa0-212741c0d882" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1359.768200] env[62619]: DEBUG nova.compute.manager [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Instance network_info: |[{"id": "149b04f5-c179-4c37-9c3d-7293f5342eca", "address": "fa:16:3e:8a:0d:12", "network": {"id": "0ffdf557-31ac-4564-bd69-246c0a1e9f3a", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1534333533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12de0928b20e47a3a5de58d191ffcf5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap149b04f5-c1", "ovs_interfaceid": "149b04f5-c179-4c37-9c3d-7293f5342eca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1359.768811] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:0d:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d2e4070-a78e-4d08-a104-b6312ab65577', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '149b04f5-c179-4c37-9c3d-7293f5342eca', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1359.779009] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Creating folder: Project (12de0928b20e47a3a5de58d191ffcf5a). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1359.779578] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1758a01d-6c20-4348-81ba-103e5a731599 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.793214] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Created folder: Project (12de0928b20e47a3a5de58d191ffcf5a) in parent group-v368875. [ 1359.793530] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Creating folder: Instances. Parent ref: group-v368898. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1359.793929] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61761cd0-4aa3-4dc4-9288-229f6b56f616 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.806706] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Created folder: Instances in parent group-v368898. [ 1359.806986] env[62619]: DEBUG oslo.service.loopingcall [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1359.807616] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1359.807616] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b18f351-f9e6-41c2-b846-da7e29d27eae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.827428] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1359.827428] env[62619]: value = "task-1777129" [ 1359.827428] env[62619]: _type = "Task" [ 1359.827428] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.837158] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777129, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.892969] env[62619]: DEBUG oslo_vmware.api [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Task: {'id': task-1777125, 'name': PowerOffVM_Task, 'duration_secs': 0.443333} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.892969] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1359.892969] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1359.893194] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33334610-c66b-4560-8bb0-1a1cdfebad62 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.974570] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1359.974570] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1359.974570] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Deleting the datastore file [datastore1] aa576459-65bf-4b16-ad1d-0930497522eb {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1359.975146] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de069c12-1460-43c2-ac88-f13222ca129d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.978912] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.982043] env[62619]: DEBUG oslo_vmware.api [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Waiting for the task: (returnval){ [ 1359.982043] env[62619]: value = "task-1777131" [ 1359.982043] env[62619]: _type = "Task" [ 1359.982043] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.986198] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.986440] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.991845] env[62619]: DEBUG oslo_vmware.api [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Task: {'id': task-1777131, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.170725] env[62619]: DEBUG oslo_vmware.api [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777123, 'name': PowerOnVM_Task, 'duration_secs': 1.736703} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.170930] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1360.171600] env[62619]: INFO nova.compute.manager [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Took 11.69 seconds to spawn the instance on the hypervisor. [ 1360.171942] env[62619]: DEBUG nova.compute.manager [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1360.172922] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55ce461-5bff-4dd5-a21d-0e854a501dad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.239295] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777126, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11369} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.243019] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1360.243019] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5022084d-25da-4fb0-a42d-afd72990c837 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.269353] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 28a8485c-fc0d-4fd0-8be9-37c49caf89b8/28a8485c-fc0d-4fd0-8be9-37c49caf89b8.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1360.269718] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1c18749-c63a-4ae7-b37b-ecb9ae7bd5f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.292656] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Waiting for the task: (returnval){ [ 1360.292656] env[62619]: value = "task-1777132" [ 1360.292656] env[62619]: _type = "Task" [ 1360.292656] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.309647] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777132, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.339674] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777129, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.450635] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Acquiring lock "3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.451236] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Lock "3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.495035] env[62619]: DEBUG oslo_vmware.api [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Task: {'id': task-1777131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.406855} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.495035] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1360.495035] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1360.495035] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1360.495035] env[62619]: INFO nova.compute.manager [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1360.495410] env[62619]: DEBUG oslo.service.loopingcall [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1360.495410] env[62619]: DEBUG nova.compute.manager [-] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1360.495410] env[62619]: DEBUG nova.network.neutron [-] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1360.694494] env[62619]: INFO nova.compute.manager [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Took 22.39 seconds to build instance. [ 1360.710326] env[62619]: DEBUG nova.compute.manager [req-3f5f6965-249f-49e7-af83-997e51d0e9b6 req-1d577c1a-d6c5-45b0-a3f0-3299d53129be service nova] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Received event network-vif-plugged-149b04f5-c179-4c37-9c3d-7293f5342eca {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1360.710820] env[62619]: DEBUG oslo_concurrency.lockutils [req-3f5f6965-249f-49e7-af83-997e51d0e9b6 req-1d577c1a-d6c5-45b0-a3f0-3299d53129be service nova] Acquiring lock "e0e25ddd-3692-480f-bfa0-212741c0d882-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.711055] env[62619]: DEBUG oslo_concurrency.lockutils [req-3f5f6965-249f-49e7-af83-997e51d0e9b6 req-1d577c1a-d6c5-45b0-a3f0-3299d53129be service nova] Lock "e0e25ddd-3692-480f-bfa0-212741c0d882-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.711266] env[62619]: DEBUG oslo_concurrency.lockutils [req-3f5f6965-249f-49e7-af83-997e51d0e9b6 req-1d577c1a-d6c5-45b0-a3f0-3299d53129be service nova] Lock "e0e25ddd-3692-480f-bfa0-212741c0d882-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1360.711441] env[62619]: DEBUG nova.compute.manager [req-3f5f6965-249f-49e7-af83-997e51d0e9b6 req-1d577c1a-d6c5-45b0-a3f0-3299d53129be service nova] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] No waiting events found dispatching network-vif-plugged-149b04f5-c179-4c37-9c3d-7293f5342eca {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1360.711601] env[62619]: WARNING nova.compute.manager [req-3f5f6965-249f-49e7-af83-997e51d0e9b6 req-1d577c1a-d6c5-45b0-a3f0-3299d53129be service nova] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Received unexpected event network-vif-plugged-149b04f5-c179-4c37-9c3d-7293f5342eca for instance with vm_state building and task_state spawning. 
[ 1360.713026] env[62619]: DEBUG nova.compute.manager [req-3f5f6965-249f-49e7-af83-997e51d0e9b6 req-1d577c1a-d6c5-45b0-a3f0-3299d53129be service nova] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Received event network-changed-149b04f5-c179-4c37-9c3d-7293f5342eca {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1360.713026] env[62619]: DEBUG nova.compute.manager [req-3f5f6965-249f-49e7-af83-997e51d0e9b6 req-1d577c1a-d6c5-45b0-a3f0-3299d53129be service nova] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Refreshing instance network info cache due to event network-changed-149b04f5-c179-4c37-9c3d-7293f5342eca. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1360.713026] env[62619]: DEBUG oslo_concurrency.lockutils [req-3f5f6965-249f-49e7-af83-997e51d0e9b6 req-1d577c1a-d6c5-45b0-a3f0-3299d53129be service nova] Acquiring lock "refresh_cache-e0e25ddd-3692-480f-bfa0-212741c0d882" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1360.713026] env[62619]: DEBUG oslo_concurrency.lockutils [req-3f5f6965-249f-49e7-af83-997e51d0e9b6 req-1d577c1a-d6c5-45b0-a3f0-3299d53129be service nova] Acquired lock "refresh_cache-e0e25ddd-3692-480f-bfa0-212741c0d882" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.713026] env[62619]: DEBUG nova.network.neutron [req-3f5f6965-249f-49e7-af83-997e51d0e9b6 req-1d577c1a-d6c5-45b0-a3f0-3299d53129be service nova] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Refreshing network info cache for port 149b04f5-c179-4c37-9c3d-7293f5342eca {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1360.739726] env[62619]: DEBUG nova.network.neutron [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Updating instance_info_cache with network_info: [{"id": "e9bd11b1-25ab-47d5-a138-85ec70cd7a3b", "address": "fa:16:3e:30:22:6a", "network": {"id": "b07fc3fc-86b1-4689-bc3e-430ec15c2d40", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1955054791-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e606dfc7f0742f48e37edbf8b5ca3bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9bd11b1-25", "ovs_interfaceid": "e9bd11b1-25ab-47d5-a138-85ec70cd7a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1360.802811] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': 
task-1777132, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.840504] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777129, 'name': CreateVM_Task, 'duration_secs': 0.71506} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.846018] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1360.846018] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1360.846018] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.846018] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1360.846018] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c767b24a-f88e-42f2-b3d1-553ddbd58a27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.849616] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Waiting for the task: (returnval){ [ 1360.849616] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52da0292-7a82-9b13-7af1-7566697f0349" [ 1360.849616] env[62619]: _type = "Task" [ 1360.849616] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.857020] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06a52a2-c066-4728-85fc-4ef8f17a9397 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.861053] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52da0292-7a82-9b13-7af1-7566697f0349, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.865521] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b130ec7-e781-4a18-9878-f4977dddbcd2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.898418] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e30dc8-b214-46c8-8140-76fa1c333ca6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.905811] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99913c4d-1fe0-4a0e-a16d-e4a127be72c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.924023] env[62619]: DEBUG nova.compute.provider_tree [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1361.099090] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.102897] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.196579] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1d1e2fce-0f10-4be6-bf44-8ef879c920f7 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "4763e489-5aeb-4dc0-b327-b79a55afdfe3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.900s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.246445] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Releasing lock "refresh_cache-b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1361.302962] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777132, 'name': ReconfigVM_Task, 'duration_secs': 0.735638} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.303280] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 28a8485c-fc0d-4fd0-8be9-37c49caf89b8/28a8485c-fc0d-4fd0-8be9-37c49caf89b8.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1361.304068] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8d50eb1-dfc4-4b94-b32c-b49c35ad7082 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.310736] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Waiting for the task: (returnval){ [ 1361.310736] env[62619]: value = "task-1777133" [ 1361.310736] env[62619]: _type = "Task" [ 1361.310736] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.324755] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777133, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.367306] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52da0292-7a82-9b13-7af1-7566697f0349, 'name': SearchDatastore_Task, 'duration_secs': 0.011124} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.371780] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1361.372247] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1361.373731] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1361.373731] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.375368] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1361.376404] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7342f248-b756-460e-9db6-495bb907f8d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.384605] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1361.384779] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1361.385524] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc8057c9-5ed4-484d-832c-24bfe6295d0e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.390584] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Waiting for the task: (returnval){ [ 1361.390584] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528b3711-6d21-73b5-a330-7fcdec82c545" [ 1361.390584] env[62619]: _type = "Task" [ 1361.390584] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.398209] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528b3711-6d21-73b5-a330-7fcdec82c545, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.423723] env[62619]: DEBUG nova.scheduler.client.report [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1361.700967] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1361.721512] env[62619]: DEBUG nova.network.neutron [req-3f5f6965-249f-49e7-af83-997e51d0e9b6 req-1d577c1a-d6c5-45b0-a3f0-3299d53129be service nova] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Updated VIF entry in instance network info cache for port 149b04f5-c179-4c37-9c3d-7293f5342eca. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1361.721841] env[62619]: DEBUG nova.network.neutron [req-3f5f6965-249f-49e7-af83-997e51d0e9b6 req-1d577c1a-d6c5-45b0-a3f0-3299d53129be service nova] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Updating instance_info_cache with network_info: [{"id": "149b04f5-c179-4c37-9c3d-7293f5342eca", "address": "fa:16:3e:8a:0d:12", "network": {"id": "0ffdf557-31ac-4564-bd69-246c0a1e9f3a", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1534333533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12de0928b20e47a3a5de58d191ffcf5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap149b04f5-c1", "ovs_interfaceid": "149b04f5-c179-4c37-9c3d-7293f5342eca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.756020] env[62619]: DEBUG nova.compute.manager [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1361.756020] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c847ea8-3752-4210-b3f1-612a3487185c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.770519] env[62619]: DEBUG nova.network.neutron [-] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.822243] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777133, 'name': Rename_Task, 'duration_secs': 0.305305} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.822511] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1361.822752] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d73751e-0b43-43b1-aa32-a0dfa59bc3b3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.829218] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Waiting for the task: (returnval){ [ 1361.829218] env[62619]: value = "task-1777134" [ 1361.829218] env[62619]: _type = "Task" [ 1361.829218] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.838507] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777134, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.900346] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528b3711-6d21-73b5-a330-7fcdec82c545, 'name': SearchDatastore_Task, 'duration_secs': 0.011522} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.901166] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea2798a8-8565-4df6-98a7-b94231169c54 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.907497] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Waiting for the task: (returnval){ [ 1361.907497] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52adf2ac-6432-d0db-c8f7-4afc1c39c41d" [ 1361.907497] env[62619]: _type = "Task" [ 1361.907497] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.915147] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52adf2ac-6432-d0db-c8f7-4afc1c39c41d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.929000] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.442s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.929613] env[62619]: DEBUG nova.compute.manager [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1361.933037] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.526s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.933696] env[62619]: INFO nova.compute.claims [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1361.993175] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Acquiring lock "4f08d36b-f26e-499e-a4be-d8cbb481a44d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.993449] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Lock "4f08d36b-f26e-499e-a4be-d8cbb481a44d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.993778] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Acquiring lock "4f08d36b-f26e-499e-a4be-d8cbb481a44d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.993842] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Lock "4f08d36b-f26e-499e-a4be-d8cbb481a44d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.993981] env[62619]: DEBUG 
oslo_concurrency.lockutils [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Lock "4f08d36b-f26e-499e-a4be-d8cbb481a44d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.996044] env[62619]: INFO nova.compute.manager [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Terminating instance [ 1362.224645] env[62619]: DEBUG oslo_concurrency.lockutils [req-3f5f6965-249f-49e7-af83-997e51d0e9b6 req-1d577c1a-d6c5-45b0-a3f0-3299d53129be service nova] Releasing lock "refresh_cache-e0e25ddd-3692-480f-bfa0-212741c0d882" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1362.229146] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.273141] env[62619]: INFO nova.compute.manager [-] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Took 1.78 seconds to deallocate network for instance. [ 1362.347993] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777134, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.418613] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52adf2ac-6432-d0db-c8f7-4afc1c39c41d, 'name': SearchDatastore_Task, 'duration_secs': 0.018048} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.418997] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1362.419418] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e0e25ddd-3692-480f-bfa0-212741c0d882/e0e25ddd-3692-480f-bfa0-212741c0d882.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1362.420140] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e0d57f0-675e-4cdd-af5b-030e3dfb5e99 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.430298] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Waiting for the task: (returnval){ [ 1362.430298] env[62619]: value = "task-1777135" [ 1362.430298] env[62619]: _type = "Task" [ 1362.430298] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.440055] env[62619]: DEBUG nova.compute.utils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1362.446210] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777135, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.446210] env[62619]: DEBUG nova.compute.manager [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1362.446210] env[62619]: DEBUG nova.network.neutron [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1362.500542] env[62619]: DEBUG nova.compute.manager [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1362.500542] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1362.501292] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4cf25b5-3caa-4eb7-8e13-102e01b739ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.515368] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1362.519142] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bdb4b011-839c-4f41-8969-1869f459c295 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.525276] env[62619]: DEBUG oslo_vmware.api [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Waiting for the task: (returnval){ [ 1362.525276] env[62619]: value = "task-1777136" [ 1362.525276] env[62619]: _type = "Task" [ 1362.525276] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.536590] env[62619]: DEBUG oslo_vmware.api [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777136, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.771975] env[62619]: DEBUG nova.policy [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb56261151994b459d40b190725f3867', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'af811eaa982a4c329e8ab9b58f4c8695', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1362.775518] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9776ffa-4061-409b-8996-08bdd4efe739 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.779807] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.786424] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Doing hard reboot of VM {{(pid=62619) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1362.786424] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-bca99a14-0e3c-4bf9-94fe-26564733d538 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.793937] env[62619]: DEBUG oslo_vmware.api [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Waiting for the task: (returnval){ [ 1362.793937] env[62619]: value = "task-1777137" [ 1362.793937] env[62619]: _type = "Task" [ 1362.793937] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.810036] env[62619]: DEBUG oslo_vmware.api [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777137, 'name': ResetVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.842352] env[62619]: DEBUG oslo_vmware.api [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777134, 'name': PowerOnVM_Task, 'duration_secs': 0.837393} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.843391] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1362.843391] env[62619]: INFO nova.compute.manager [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Took 12.03 seconds to spawn the instance on the hypervisor. [ 1362.845051] env[62619]: DEBUG nova.compute.manager [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1362.846012] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214baa8c-791d-44d6-b7a7-72d8b5618e07 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.939892] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777135, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.948501] env[62619]: DEBUG nova.compute.manager [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1363.036293] env[62619]: DEBUG oslo_vmware.api [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777136, 'name': PowerOffVM_Task, 'duration_secs': 0.199332} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.037024] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1363.038528] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1363.038528] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20d6d3a8-f2de-4338-9e08-4dbe93d04fe0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.159094] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1363.159424] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1363.159638] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Deleting the datastore file [datastore1] 4f08d36b-f26e-499e-a4be-d8cbb481a44d {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1363.159944] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20804edc-670e-4731-9ba5-a22b49b0bac0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.166505] env[62619]: DEBUG oslo_vmware.api [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Waiting for the task: (returnval){ [ 1363.166505] env[62619]: value = "task-1777139" [ 1363.166505] env[62619]: _type = "Task" [ 1363.166505] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.177523] env[62619]: DEBUG oslo_vmware.api [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777139, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.276178] env[62619]: DEBUG nova.compute.manager [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1363.277246] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa39f525-329c-4fdb-91df-46c62799633d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.305104] env[62619]: DEBUG oslo_vmware.api [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777137, 'name': ResetVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.340818] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc93168b-c29a-47b1-95df-c072219e7e3b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.348409] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808d201e-9794-469a-98f7-ad2e13eff6e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.386113] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8380988-b801-4ad5-a90e-63e1c228b6da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.396076] env[62619]: INFO nova.compute.manager [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Took 22.82 seconds to build instance. [ 1363.398921] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c5dbcf-9732-4d57-8a9a-c6462714e15e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.413846] env[62619]: DEBUG nova.compute.provider_tree [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1363.441913] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777135, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594188} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.442600] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e0e25ddd-3692-480f-bfa0-212741c0d882/e0e25ddd-3692-480f-bfa0-212741c0d882.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1363.442600] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1363.444560] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e2ef7ca-1178-4f35-a6e6-56cd04f39616 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.453232] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Waiting for the task: (returnval){ [ 1363.453232] env[62619]: value = "task-1777140" [ 1363.453232] env[62619]: _type = "Task" [ 1363.453232] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.468864] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777140, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.681183] env[62619]: DEBUG oslo_vmware.api [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777139, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.794437] env[62619]: INFO nova.compute.manager [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] instance snapshotting [ 1363.794437] env[62619]: WARNING nova.compute.manager [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1363.800146] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a66c240-454c-42b6-816c-a93886ad425d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.807663] env[62619]: DEBUG oslo_vmware.api [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777137, 'name': ResetVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.822279] env[62619]: DEBUG nova.network.neutron [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Successfully created port: 5530a947-e30a-4156-be0f-8e8dd90d2aef {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1363.826021] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1e21ca-1ac4-4370-8824-92cb73b36bac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.899073] env[62619]: DEBUG oslo_concurrency.lockutils [None req-587c291d-a474-4f92-850e-68a534a0ede9 tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Lock "28a8485c-fc0d-4fd0-8be9-37c49caf89b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.335s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.916853] env[62619]: DEBUG nova.scheduler.client.report [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1363.966476] env[62619]: DEBUG nova.compute.manager [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1363.968547] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777140, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077137} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.969387] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1363.970195] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba9e575-34ac-43da-aabc-90d97de68451 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.994791] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] e0e25ddd-3692-480f-bfa0-212741c0d882/e0e25ddd-3692-480f-bfa0-212741c0d882.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1363.997515] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1de75b9b-35ad-4915-b927-31fc1bca0914 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.018033] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Waiting for the task: (returnval){ [ 1364.018033] env[62619]: value = "task-1777141" [ 1364.018033] env[62619]: _type = "Task" [ 1364.018033] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.020249] env[62619]: DEBUG nova.virt.hardware [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1364.020543] env[62619]: DEBUG nova.virt.hardware [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1364.020623] env[62619]: DEBUG nova.virt.hardware [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1364.020786] env[62619]: DEBUG nova.virt.hardware [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1364.020928] env[62619]: DEBUG nova.virt.hardware [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1364.021084] env[62619]: DEBUG nova.virt.hardware [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1364.021320] env[62619]: DEBUG nova.virt.hardware [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1364.021577] env[62619]: DEBUG nova.virt.hardware [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1364.021765] env[62619]: DEBUG nova.virt.hardware [None 
req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1364.021925] env[62619]: DEBUG nova.virt.hardware [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1364.022103] env[62619]: DEBUG nova.virt.hardware [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1364.023349] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f8d88c-3325-43eb-b35e-d44796c70596 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.036914] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8982c75-254d-4f44-913f-030a320cfb93 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.041236] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777141, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.180072] env[62619]: DEBUG oslo_vmware.api [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Task: {'id': task-1777139, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.783011} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.180353] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1364.180538] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1364.180740] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1364.180932] env[62619]: INFO nova.compute.manager [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1364.181508] env[62619]: DEBUG oslo.service.loopingcall [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1364.181742] env[62619]: DEBUG nova.compute.manager [-] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1364.181851] env[62619]: DEBUG nova.network.neutron [-] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1364.305376] env[62619]: DEBUG oslo_vmware.api [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777137, 'name': ResetVM_Task, 'duration_secs': 1.117739} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.305638] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Did hard reboot of VM {{(pid=62619) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1364.306405] env[62619]: DEBUG nova.compute.manager [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1364.306714] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456dd57b-8424-4b2a-82ab-cc699650c830 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.336761] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1364.336761] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c21e3b3e-8357-4304-8811-ccdbb0ca90a6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.349265] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1364.349265] env[62619]: value = "task-1777142" [ 1364.349265] env[62619]: _type = "Task" [ 1364.349265] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.366111] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777142, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.402895] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1364.423356] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.490s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.423356] env[62619]: DEBUG nova.compute.manager [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1364.428314] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.888s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.431346] env[62619]: INFO nova.compute.claims [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1364.534542] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777141, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.819980] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fdb4fefa-eddb-4905-9c1f-837aa86ce436 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Lock "b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.800s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.859452] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777142, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.928309] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.943480] env[62619]: DEBUG nova.compute.utils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1364.946992] env[62619]: DEBUG nova.compute.manager [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1364.947202] env[62619]: DEBUG nova.network.neutron [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1364.997424] env[62619]: DEBUG nova.policy [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'efb63d88d77a4c039812247e21e928ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9a96018b71f4e6db335ff7deeb6c4b4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1365.037751] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777141, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.327104] env[62619]: DEBUG nova.network.neutron [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Successfully created port: 3af5cb8d-ea8b-4677-920d-5e06ecc2843b {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1365.362630] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777142, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.448194] env[62619]: DEBUG nova.compute.manager [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1365.537015] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777141, 'name': ReconfigVM_Task, 'duration_secs': 1.158142} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.538936] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Reconfigured VM instance instance-00000009 to attach disk [datastore1] e0e25ddd-3692-480f-bfa0-212741c0d882/e0e25ddd-3692-480f-bfa0-212741c0d882.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1365.541619] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-620474cd-aab5-4a6b-9ef3-62cd11bacad3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.549451] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Waiting for the task: (returnval){ [ 1365.549451] env[62619]: value = "task-1777143" [ 1365.549451] env[62619]: _type = "Task" [ 1365.549451] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.560213] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777143, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.578557] env[62619]: DEBUG nova.compute.manager [req-7afebf88-cf10-42e4-ba52-1485faf44012 req-d71bccad-1c80-47ea-8cbf-1d70c82aa4f4 service nova] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Received event network-vif-deleted-50d706b8-8aee-4647-b813-73db43cd22df {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1365.744927] env[62619]: DEBUG nova.network.neutron [-] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.868861] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777142, 'name': CreateSnapshot_Task, 'duration_secs': 1.260238} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.868861] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1365.869915] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9611dd5-1d84-4717-a60f-7e2c8c0edd1c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.893082] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ce9737-42e4-401f-94e7-7998e4e83f73 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.901225] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db9bea3-aeb0-459c-b90e-94743a4e0636 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.939839] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f51df78a-7c3f-47e5-a453-ebcbaef1e412 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.950210] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c632bf1-032a-4e28-8900-cc4d3a6ff054 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.970302] env[62619]: DEBUG nova.compute.provider_tree [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1366.060563] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777143, 'name': Rename_Task, 'duration_secs': 0.160205} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.060974] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1366.061222] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee34ae6e-3f21-4531-bd66-1ac059eef264 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.070112] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Waiting for the task: (returnval){ [ 1366.070112] env[62619]: value = "task-1777144" [ 1366.070112] env[62619]: _type = "Task" [ 1366.070112] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.077353] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777144, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.247297] env[62619]: INFO nova.compute.manager [-] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Took 2.07 seconds to deallocate network for instance. [ 1366.398557] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1366.401241] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-765ffc4d-8fe9-4690-84e2-def714a0dd91 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.411549] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1366.411549] env[62619]: value = "task-1777145" [ 1366.411549] env[62619]: _type = "Task" [ 1366.411549] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.420982] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777145, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.475446] env[62619]: DEBUG nova.compute.manager [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1366.478908] env[62619]: DEBUG nova.scheduler.client.report [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1366.527047] env[62619]: DEBUG nova.virt.hardware [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1366.527264] env[62619]: DEBUG nova.virt.hardware [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1366.529650] env[62619]: DEBUG nova.virt.hardware [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1366.529758] env[62619]: DEBUG nova.virt.hardware [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1366.529913] env[62619]: DEBUG nova.virt.hardware [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1366.530072] env[62619]: DEBUG nova.virt.hardware [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1366.530288] env[62619]: DEBUG nova.virt.hardware [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1366.530445] env[62619]: DEBUG nova.virt.hardware [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1366.530616] env[62619]: DEBUG nova.virt.hardware [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1366.530791] env[62619]: DEBUG nova.virt.hardware [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1366.531361] env[62619]: DEBUG nova.virt.hardware [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1366.532294] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df9d293-2f9e-4cd0-ac01-ed23b748ab10 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.546440] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28238d1-eec5-4575-a277-d0cebe9a92e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.579040] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777144, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.756687] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.823993] env[62619]: DEBUG nova.network.neutron [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Successfully updated port: 5530a947-e30a-4156-be0f-8e8dd90d2aef {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1366.828682] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Acquiring lock "e98120b4-7916-4ce4-88ef-0c904852bb1f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.828898] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Lock "e98120b4-7916-4ce4-88ef-0c904852bb1f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.829114] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Acquiring lock "e98120b4-7916-4ce4-88ef-0c904852bb1f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.829351] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Lock "e98120b4-7916-4ce4-88ef-0c904852bb1f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.829594] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Lock "e98120b4-7916-4ce4-88ef-0c904852bb1f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.834763] env[62619]: INFO nova.compute.manager [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Terminating instance [ 1366.923348] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 
tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777145, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.990820] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.991115] env[62619]: DEBUG nova.compute.manager [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1366.994127] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.066s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.995680] env[62619]: INFO nova.compute.claims [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1367.085978] env[62619]: DEBUG oslo_vmware.api [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777144, 'name': PowerOnVM_Task, 'duration_secs': 0.910383} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.085978] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1367.085978] env[62619]: INFO nova.compute.manager [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Took 11.10 seconds to spawn the instance on the hypervisor. 
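The PowerOnVM_Task / Rename_Task / CloneVM_Task entries above are the visible side of oslo.vmware's task-polling helper: the caller invokes the SOAP method, gets back a task reference, and wait_for_task() polls it until completion, emitting the intermediate "progress is N%." lines. A minimal illustrative sketch, not taken from the log; `session` and `vm_ref` are assumed to be an existing oslo.vmware VMwareAPISession and a VM managed-object reference:

from oslo_vmware import exceptions as vexc

def power_on(session, vm_ref):
    # Issues the SOAP call recorded as "Invoking VirtualMachine.PowerOnVM_Task ..."
    # and returns a task managed-object reference.
    task_ref = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    try:
        # wait_for_task() is the loop behind the repeated
        # "Task: {'id': ..., 'name': PowerOnVM_Task} progress is N%." records;
        # it polls until the task reaches success or error.
        return session.wait_for_task(task_ref)
    except vexc.VimFaultException:
        # A failed task surfaces as a VimFaultException carrying the fault list.
        raise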
[ 1367.085978] env[62619]: DEBUG nova.compute.manager [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1367.085978] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e123c47-6115-457d-838c-bd62b4a4f7b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.189915] env[62619]: DEBUG nova.network.neutron [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Successfully updated port: 3af5cb8d-ea8b-4677-920d-5e06ecc2843b {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1367.329260] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "refresh_cache-aa4906f1-e801-4df0-819e-8c5fb5930fb5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1367.329719] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired lock "refresh_cache-aa4906f1-e801-4df0-819e-8c5fb5930fb5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.329719] env[62619]: DEBUG nova.network.neutron [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1367.340548] env[62619]: DEBUG nova.compute.manager [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1367.340766] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1367.342338] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732e0fe8-b77b-4ecd-9e67-9933604f9585 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.352589] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1367.352843] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0de5c05-84fe-4f0b-8074-54a782e1c121 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.365689] env[62619]: DEBUG oslo_vmware.api [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Waiting for the task: (returnval){ [ 1367.365689] env[62619]: value = "task-1777146" [ 1367.365689] env[62619]: _type = "Task" [ 1367.365689] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.374456] env[62619]: DEBUG oslo_vmware.api [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777146, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.423536] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777145, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.503258] env[62619]: DEBUG nova.compute.utils [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1367.511580] env[62619]: DEBUG nova.compute.manager [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Not allocating networking since 'none' was specified. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1367.608335] env[62619]: INFO nova.compute.manager [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Took 26.86 seconds to build instance. 
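The "Acquiring lock" / "acquired ... :: waited" / '"released" ... :: held' lines throughout this run come from oslo.concurrency's lockutils wrappers, which Nova uses both as a decorator and as a context manager. A rough sketch under that assumption; the function names below are placeholders, not Nova's actual methods:

from oslo_concurrency import lockutils

# Decorator form: callers serialize on the named in-process lock, which is what
# produces the 'acquired ... :: waited N.NNNs' / '"released" ... :: held N.NNNs' lines.
@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # placeholder body; the real ResourceTracker does its accounting here
    return instance_uuid

# Context-manager form, comparable to the refresh_cache-<uuid> locks above.
def refresh_network_cache(instance_uuid):
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        # rebuild the instance's network info cache while holding the lock
        pass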
[ 1367.695566] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Acquiring lock "refresh_cache-ed34ae20-a891-45aa-8124-f36f264937f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1367.695764] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Acquired lock "refresh_cache-ed34ae20-a891-45aa-8124-f36f264937f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.696097] env[62619]: DEBUG nova.network.neutron [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1367.877249] env[62619]: DEBUG oslo_vmware.api [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777146, 'name': PowerOffVM_Task, 'duration_secs': 0.321038} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.877249] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1367.877249] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1367.877249] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86639f0c-fb72-4dba-984d-10fac1bd14e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.883047] env[62619]: DEBUG nova.network.neutron [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1367.925929] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777145, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.966204] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1367.966204] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1367.966204] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Deleting the datastore file [datastore1] e98120b4-7916-4ce4-88ef-0c904852bb1f {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1367.966204] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-478ce8f6-bae2-41ee-84bc-bb6feb6d648e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.974071] env[62619]: DEBUG oslo_vmware.api [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Waiting for the task: (returnval){ [ 1367.974071] env[62619]: value = "task-1777148" [ 1367.974071] env[62619]: _type = "Task" [ 1367.974071] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.981659] env[62619]: DEBUG oslo_vmware.api [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.015312] env[62619]: DEBUG nova.compute.manager [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1368.114476] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f49f84b1-c49d-41b7-8611-c404f6437f8d tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Lock "e0e25ddd-3692-480f-bfa0-212741c0d882" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.385s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.189846] env[62619]: DEBUG nova.network.neutron [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating instance_info_cache with network_info: [{"id": "5530a947-e30a-4156-be0f-8e8dd90d2aef", "address": "fa:16:3e:96:ec:35", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5530a947-e3", "ovs_interfaceid": "5530a947-e30a-4156-be0f-8e8dd90d2aef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.273630] env[62619]: DEBUG nova.network.neutron [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1368.418995] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Acquiring lock "4374c102-a6fe-45ef-ad49-a1295f96899a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.419333] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Lock "4374c102-a6fe-45ef-ad49-a1295f96899a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.430721] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Acquiring lock "91ce0ab3-4fa4-4992-995a-0baeec91d9d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.431424] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Lock "91ce0ab3-4fa4-4992-995a-0baeec91d9d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.437752] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777145, 'name': CloneVM_Task, 'duration_secs': 1.587127} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.438067] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Created linked-clone VM from snapshot [ 1368.441747] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0df3e36-9cb0-4411-96f8-ad2557984d5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.452723] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Uploading image 6e31803f-a937-40b2-93d4-307e624edb77 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1368.463839] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-418d7465-9a83-4f7d-b77a-0805c07d3503 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.477063] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abb09a0-7f66-4ae0-b5ac-fce6436349be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.487541] env[62619]: DEBUG oslo_vmware.rw_handles [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1368.487541] env[62619]: value = "vm-368902" [ 1368.487541] env[62619]: _type = "VirtualMachine" [ 1368.487541] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1368.487541] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0fcd6229-71cf-4bf3-aca6-fcb8c5c5109e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.493315] env[62619]: DEBUG oslo_vmware.api [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Task: {'id': task-1777148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.475545} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.520789] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1368.521064] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1368.521278] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1368.521484] env[62619]: INFO nova.compute.manager [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1368.521743] env[62619]: DEBUG oslo.service.loopingcall [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1368.530706] env[62619]: DEBUG nova.compute.manager [-] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1368.530706] env[62619]: DEBUG nova.network.neutron [-] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1368.532276] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf50ccdf-3c87-47a6-9b2d-82aae15a6561 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.541941] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41906879-5b19-45a7-bf9d-0ced42a38385 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.545821] env[62619]: DEBUG oslo_vmware.rw_handles [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lease: (returnval){ [ 1368.545821] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c30a65-6d79-5343-3b5b-00cdbf6af8d2" [ 1368.545821] env[62619]: _type = "HttpNfcLease" [ 1368.545821] env[62619]: } obtained for exporting VM: (result){ [ 1368.545821] env[62619]: value = "vm-368902" [ 1368.545821] env[62619]: _type = "VirtualMachine" [ 1368.545821] env[62619]: }. 
{{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1368.546127] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the lease: (returnval){ [ 1368.546127] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c30a65-6d79-5343-3b5b-00cdbf6af8d2" [ 1368.546127] env[62619]: _type = "HttpNfcLease" [ 1368.546127] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1368.559019] env[62619]: DEBUG nova.compute.provider_tree [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1368.562376] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1368.562376] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c30a65-6d79-5343-3b5b-00cdbf6af8d2" [ 1368.562376] env[62619]: _type = "HttpNfcLease" [ 1368.562376] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1368.562619] env[62619]: DEBUG oslo_vmware.rw_handles [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1368.562619] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c30a65-6d79-5343-3b5b-00cdbf6af8d2" [ 1368.562619] env[62619]: _type = "HttpNfcLease" [ 1368.562619] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1368.564314] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36d99e8-9f69-4b33-bc4d-7fba4230bcd6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.572143] env[62619]: DEBUG oslo_vmware.rw_handles [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bfeced-0ab9-3e23-0e46-827a13fe9683/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1368.572369] env[62619]: DEBUG oslo_vmware.rw_handles [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bfeced-0ab9-3e23-0e46-827a13fe9683/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1368.628783] env[62619]: DEBUG nova.compute.manager [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1368.674726] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f4c49a58-e9ee-4b11-9cc1-4f5803c23e2c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.694878] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Releasing lock "refresh_cache-aa4906f1-e801-4df0-819e-8c5fb5930fb5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1368.695374] env[62619]: DEBUG nova.compute.manager [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Instance network_info: |[{"id": "5530a947-e30a-4156-be0f-8e8dd90d2aef", "address": "fa:16:3e:96:ec:35", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5530a947-e3", "ovs_interfaceid": "5530a947-e30a-4156-be0f-8e8dd90d2aef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1368.695575] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:ec:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5530a947-e30a-4156-be0f-8e8dd90d2aef', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1368.703300] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Creating folder: Project (af811eaa982a4c329e8ab9b58f4c8695). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1368.705593] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7415448-afeb-4794-b967-f3e2694c930d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.720769] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Created folder: Project (af811eaa982a4c329e8ab9b58f4c8695) in parent group-v368875. [ 1368.720769] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Creating folder: Instances. Parent ref: group-v368903. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1368.720769] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31558987-7c8b-421a-a0ef-90cb869dfa43 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.729701] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Created folder: Instances in parent group-v368903. [ 1368.729764] env[62619]: DEBUG oslo.service.loopingcall [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1368.729907] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1368.730128] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3fc2f9e9-5779-4141-ad5a-618368280204 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.749723] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1368.749723] env[62619]: value = "task-1777152" [ 1368.749723] env[62619]: _type = "Task" [ 1368.749723] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.757714] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777152, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.788963] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Acquiring lock "28a8485c-fc0d-4fd0-8be9-37c49caf89b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.789350] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Lock "28a8485c-fc0d-4fd0-8be9-37c49caf89b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.792246] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Acquiring lock "28a8485c-fc0d-4fd0-8be9-37c49caf89b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.792246] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Lock "28a8485c-fc0d-4fd0-8be9-37c49caf89b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.792246] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Lock "28a8485c-fc0d-4fd0-8be9-37c49caf89b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.792508] env[62619]: INFO nova.compute.manager [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Terminating instance [ 1368.846124] env[62619]: DEBUG nova.compute.manager [req-f5a70ae6-04de-4c5a-ad38-b40f36866df1 req-8de1ee4b-d655-4236-a114-8077df7ce84a service nova] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Received event network-vif-plugged-3af5cb8d-ea8b-4677-920d-5e06ecc2843b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1368.846418] env[62619]: DEBUG oslo_concurrency.lockutils [req-f5a70ae6-04de-4c5a-ad38-b40f36866df1 req-8de1ee4b-d655-4236-a114-8077df7ce84a service nova] Acquiring lock "ed34ae20-a891-45aa-8124-f36f264937f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.846661] env[62619]: DEBUG oslo_concurrency.lockutils [req-f5a70ae6-04de-4c5a-ad38-b40f36866df1 
req-8de1ee4b-d655-4236-a114-8077df7ce84a service nova] Lock "ed34ae20-a891-45aa-8124-f36f264937f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.849505] env[62619]: DEBUG oslo_concurrency.lockutils [req-f5a70ae6-04de-4c5a-ad38-b40f36866df1 req-8de1ee4b-d655-4236-a114-8077df7ce84a service nova] Lock "ed34ae20-a891-45aa-8124-f36f264937f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.849505] env[62619]: DEBUG nova.compute.manager [req-f5a70ae6-04de-4c5a-ad38-b40f36866df1 req-8de1ee4b-d655-4236-a114-8077df7ce84a service nova] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] No waiting events found dispatching network-vif-plugged-3af5cb8d-ea8b-4677-920d-5e06ecc2843b {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1368.849505] env[62619]: WARNING nova.compute.manager [req-f5a70ae6-04de-4c5a-ad38-b40f36866df1 req-8de1ee4b-d655-4236-a114-8077df7ce84a service nova] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Received unexpected event network-vif-plugged-3af5cb8d-ea8b-4677-920d-5e06ecc2843b for instance with vm_state building and task_state spawning. [ 1368.889098] env[62619]: DEBUG nova.network.neutron [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Updating instance_info_cache with network_info: [{"id": "3af5cb8d-ea8b-4677-920d-5e06ecc2843b", "address": "fa:16:3e:e3:16:0b", "network": {"id": "73bdc485-118f-4877-afde-edd7e6119b94", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1994939706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9a96018b71f4e6db335ff7deeb6c4b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3af5cb8d-ea", "ovs_interfaceid": "3af5cb8d-ea8b-4677-920d-5e06ecc2843b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.991798] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "b6aae13f-0711-4421-9d55-de7ece3e4b89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.992439] env[62619]: DEBUG oslo_concurrency.lockutils 
[None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "b6aae13f-0711-4421-9d55-de7ece3e4b89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.031906] env[62619]: DEBUG nova.compute.manager [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1369.056271] env[62619]: DEBUG nova.virt.hardware [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1369.057879] env[62619]: DEBUG nova.virt.hardware [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1369.058114] env[62619]: DEBUG nova.virt.hardware [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1369.058369] env[62619]: DEBUG nova.virt.hardware [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1369.058578] env[62619]: DEBUG nova.virt.hardware [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1369.058758] env[62619]: DEBUG nova.virt.hardware [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1369.058991] env[62619]: DEBUG nova.virt.hardware [None 
req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1369.059194] env[62619]: DEBUG nova.virt.hardware [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1369.059355] env[62619]: DEBUG nova.virt.hardware [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1369.059513] env[62619]: DEBUG nova.virt.hardware [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1369.059840] env[62619]: DEBUG nova.virt.hardware [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1369.061407] env[62619]: DEBUG nova.scheduler.client.report [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1369.068026] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3aa558-fcc6-419c-8ea0-75eb6ce2f93e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.076959] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f7f83b-e8f8-4d43-b2b3-282facf52ebd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.101800] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1369.110206] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 
tempest-ServerDiagnosticsV248Test-76292928-project-member] Creating folder: Project (6061338c7f9443a1ada79821081adc68). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1369.110206] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0037dd8e-9aa4-4eb4-b4ca-af47e521658c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.120171] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Created folder: Project (6061338c7f9443a1ada79821081adc68) in parent group-v368875. [ 1369.120432] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Creating folder: Instances. Parent ref: group-v368906. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1369.120727] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-535e5793-d782-43c1-9898-d7ce47c46677 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.144216] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Created folder: Instances in parent group-v368906. [ 1369.144931] env[62619]: DEBUG oslo.service.loopingcall [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1369.149591] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1369.151231] env[62619]: DEBUG nova.compute.manager [req-95db2603-c6ef-4e71-b453-009d9694fa38 req-5bdae9e6-880a-4c0c-b1de-e3aa1e1d8df7 service nova] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Received event network-vif-deleted-1de1b35e-d2ff-4fda-91e3-2ba87ff24c9a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1369.151540] env[62619]: DEBUG nova.compute.manager [req-95db2603-c6ef-4e71-b453-009d9694fa38 req-5bdae9e6-880a-4c0c-b1de-e3aa1e1d8df7 service nova] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Received event network-vif-plugged-5530a947-e30a-4156-be0f-8e8dd90d2aef {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1369.151626] env[62619]: DEBUG oslo_concurrency.lockutils [req-95db2603-c6ef-4e71-b453-009d9694fa38 req-5bdae9e6-880a-4c0c-b1de-e3aa1e1d8df7 service nova] Acquiring lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.152013] env[62619]: DEBUG oslo_concurrency.lockutils [req-95db2603-c6ef-4e71-b453-009d9694fa38 req-5bdae9e6-880a-4c0c-b1de-e3aa1e1d8df7 service nova] Lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.152181] env[62619]: DEBUG oslo_concurrency.lockutils [req-95db2603-c6ef-4e71-b453-009d9694fa38 req-5bdae9e6-880a-4c0c-b1de-e3aa1e1d8df7 service nova] Lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.153662] env[62619]: DEBUG nova.compute.manager [req-95db2603-c6ef-4e71-b453-009d9694fa38 req-5bdae9e6-880a-4c0c-b1de-e3aa1e1d8df7 service nova] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] No waiting events found dispatching network-vif-plugged-5530a947-e30a-4156-be0f-8e8dd90d2aef {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1369.153843] env[62619]: WARNING nova.compute.manager [req-95db2603-c6ef-4e71-b453-009d9694fa38 req-5bdae9e6-880a-4c0c-b1de-e3aa1e1d8df7 service nova] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Received unexpected event network-vif-plugged-5530a947-e30a-4156-be0f-8e8dd90d2aef for instance with vm_state building and task_state spawning. 
[ 1369.154168] env[62619]: DEBUG nova.compute.manager [req-95db2603-c6ef-4e71-b453-009d9694fa38 req-5bdae9e6-880a-4c0c-b1de-e3aa1e1d8df7 service nova] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Received event network-changed-5530a947-e30a-4156-be0f-8e8dd90d2aef {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1369.156496] env[62619]: DEBUG nova.compute.manager [req-95db2603-c6ef-4e71-b453-009d9694fa38 req-5bdae9e6-880a-4c0c-b1de-e3aa1e1d8df7 service nova] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Refreshing instance network info cache due to event network-changed-5530a947-e30a-4156-be0f-8e8dd90d2aef. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1369.156496] env[62619]: DEBUG oslo_concurrency.lockutils [req-95db2603-c6ef-4e71-b453-009d9694fa38 req-5bdae9e6-880a-4c0c-b1de-e3aa1e1d8df7 service nova] Acquiring lock "refresh_cache-aa4906f1-e801-4df0-819e-8c5fb5930fb5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.156496] env[62619]: DEBUG oslo_concurrency.lockutils [req-95db2603-c6ef-4e71-b453-009d9694fa38 req-5bdae9e6-880a-4c0c-b1de-e3aa1e1d8df7 service nova] Acquired lock "refresh_cache-aa4906f1-e801-4df0-819e-8c5fb5930fb5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.156496] env[62619]: DEBUG nova.network.neutron [req-95db2603-c6ef-4e71-b453-009d9694fa38 req-5bdae9e6-880a-4c0c-b1de-e3aa1e1d8df7 service nova] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Refreshing network info cache for port 5530a947-e30a-4156-be0f-8e8dd90d2aef {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1369.157431] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-beebc04c-e51e-4589-a710-254be106497d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.171801] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.180486] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1369.180486] env[62619]: value = "task-1777155" [ 1369.180486] env[62619]: _type = "Task" [ 1369.180486] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.190089] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777155, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.260349] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777152, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.296343] env[62619]: DEBUG nova.compute.manager [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1369.296918] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1369.299184] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72bcb9a-b745-4953-8652-d1d2089be6da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.310309] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1369.310743] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7916fc4-ddb5-43b3-9dd2-9f71703f11c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.317773] env[62619]: DEBUG oslo_vmware.api [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Waiting for the task: (returnval){ [ 1369.317773] env[62619]: value = "task-1777156" [ 1369.317773] env[62619]: _type = "Task" [ 1369.317773] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.326019] env[62619]: DEBUG oslo_vmware.api [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777156, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.392199] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Releasing lock "refresh_cache-ed34ae20-a891-45aa-8124-f36f264937f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1369.392537] env[62619]: DEBUG nova.compute.manager [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Instance network_info: |[{"id": "3af5cb8d-ea8b-4677-920d-5e06ecc2843b", "address": "fa:16:3e:e3:16:0b", "network": {"id": "73bdc485-118f-4877-afde-edd7e6119b94", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1994939706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9a96018b71f4e6db335ff7deeb6c4b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3af5cb8d-ea", "ovs_interfaceid": "3af5cb8d-ea8b-4677-920d-5e06ecc2843b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1369.392983] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:16:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11b669be-fb26-4ef8-bdb6-c77ab9d06daf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3af5cb8d-ea8b-4677-920d-5e06ecc2843b', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1369.405439] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Creating folder: Project (a9a96018b71f4e6db335ff7deeb6c4b4). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1369.406997] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-594699dd-5ce5-44ee-a920-48c0b3fa434d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.421442] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Created folder: Project (a9a96018b71f4e6db335ff7deeb6c4b4) in parent group-v368875. [ 1369.421442] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Creating folder: Instances. Parent ref: group-v368909. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1369.421442] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ee8cc19-635a-41a1-9144-2085d0e62474 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.440738] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Created folder: Instances in parent group-v368909. [ 1369.446094] env[62619]: DEBUG oslo.service.loopingcall [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1369.446094] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1369.446094] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ebdf24fb-6983-4678-9ddf-8490e5287df8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.474300] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1369.474300] env[62619]: value = "task-1777159" [ 1369.474300] env[62619]: _type = "Task" [ 1369.474300] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.490569] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777159, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.571593] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.577s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.572370] env[62619]: DEBUG nova.compute.manager [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1369.577034] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.823s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.579619] env[62619]: INFO nova.compute.claims [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1369.651289] env[62619]: DEBUG nova.network.neutron [-] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.693909] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777155, 'name': CreateVM_Task, 'duration_secs': 0.419973} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.697036] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1369.697036] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.697036] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.697036] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1369.697036] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f2ddefe-7b97-423d-95aa-019968a8574c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.707176] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Waiting for the task: (returnval){ [ 1369.707176] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b91e28-9bee-6628-2446-385dc88ce717" [ 1369.707176] env[62619]: _type = "Task" [ 1369.707176] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.721027] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b91e28-9bee-6628-2446-385dc88ce717, 'name': SearchDatastore_Task, 'duration_secs': 0.012316} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.721566] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1369.722014] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1369.722379] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.722626] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.722897] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1369.723391] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7eb4695-3fd2-4e9a-85c3-7c8bb215e0ce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.735250] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1369.735250] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1369.737415] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c20b1dd6-dbd8-4586-abe0-3e88d10be22b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.744293] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Waiting for the task: (returnval){ [ 1369.744293] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52cae583-a7a8-d31f-1619-9efe1b18ee69" [ 1369.744293] env[62619]: _type = "Task" [ 1369.744293] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.760086] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52cae583-a7a8-d31f-1619-9efe1b18ee69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.766340] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777152, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.828620] env[62619]: DEBUG oslo_vmware.api [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777156, 'name': PowerOffVM_Task, 'duration_secs': 0.246384} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.828900] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1369.829138] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1369.829419] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ce9d448-c933-403d-bbe2-1b3cbd78e883 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.916650] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1369.916650] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1369.916650] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Deleting the datastore file [datastore1] 28a8485c-fc0d-4fd0-8be9-37c49caf89b8 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1369.917101] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a00968a-4d90-4d72-ac54-c29cae8c5c41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.926393] env[62619]: DEBUG oslo_vmware.api [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Waiting for the task: (returnval){ [ 1369.926393] env[62619]: value = "task-1777161" [ 1369.926393] env[62619]: _type = "Task" [ 1369.926393] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.936965] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Acquiring lock "b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.937629] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Lock "b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.937941] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Acquiring lock "b334cb41-5ddf-4545-8e2a-97c4d1de7cbf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.938220] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Lock "b334cb41-5ddf-4545-8e2a-97c4d1de7cbf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.938452] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Lock "b334cb41-5ddf-4545-8e2a-97c4d1de7cbf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.940789] env[62619]: DEBUG oslo_vmware.api [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777161, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.942167] env[62619]: INFO nova.compute.manager [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Terminating instance [ 1369.989149] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777159, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.075016] env[62619]: DEBUG nova.network.neutron [req-95db2603-c6ef-4e71-b453-009d9694fa38 req-5bdae9e6-880a-4c0c-b1de-e3aa1e1d8df7 service nova] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updated VIF entry in instance network info cache for port 5530a947-e30a-4156-be0f-8e8dd90d2aef. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1370.076516] env[62619]: DEBUG nova.network.neutron [req-95db2603-c6ef-4e71-b453-009d9694fa38 req-5bdae9e6-880a-4c0c-b1de-e3aa1e1d8df7 service nova] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating instance_info_cache with network_info: [{"id": "5530a947-e30a-4156-be0f-8e8dd90d2aef", "address": "fa:16:3e:96:ec:35", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5530a947-e3", "ovs_interfaceid": "5530a947-e30a-4156-be0f-8e8dd90d2aef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.093463] env[62619]: DEBUG nova.compute.utils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1370.095616] env[62619]: DEBUG nova.compute.manager [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1370.096565] env[62619]: DEBUG nova.network.neutron [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1370.156962] env[62619]: INFO nova.compute.manager [-] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Took 1.62 seconds to deallocate network for instance. 
[ 1370.170104] env[62619]: DEBUG nova.policy [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f5869dd1daa4009af5ae44a195fb8a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b7dea13f34f140dd98291849f66720ad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1370.218899] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Acquiring lock "e0e25ddd-3692-480f-bfa0-212741c0d882" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.219254] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Lock "e0e25ddd-3692-480f-bfa0-212741c0d882" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.219805] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Acquiring lock "e0e25ddd-3692-480f-bfa0-212741c0d882-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.220252] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Lock "e0e25ddd-3692-480f-bfa0-212741c0d882-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.220538] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Lock "e0e25ddd-3692-480f-bfa0-212741c0d882-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1370.226314] env[62619]: INFO nova.compute.manager [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Terminating instance [ 1370.257070] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 
tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52cae583-a7a8-d31f-1619-9efe1b18ee69, 'name': SearchDatastore_Task, 'duration_secs': 0.017519} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.263910] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e33dd3a4-f687-413f-98b6-62a23324c61e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.270913] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Waiting for the task: (returnval){ [ 1370.270913] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52afe53a-ae9d-4c05-5a1a-b90bb569657f" [ 1370.270913] env[62619]: _type = "Task" [ 1370.270913] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.280579] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777152, 'name': CreateVM_Task, 'duration_secs': 1.446258} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.281290] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1370.282167] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1370.282390] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.282788] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1370.287820] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08ef6b91-2f82-4582-9ffc-e19df6298f5a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.289599] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52afe53a-ae9d-4c05-5a1a-b90bb569657f, 'name': SearchDatastore_Task, 'duration_secs': 0.012517} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.289973] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.290349] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 1847c5d8-16eb-4feb-8a09-24ad6728e59c/1847c5d8-16eb-4feb-8a09-24ad6728e59c.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1370.291554] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b7f38ae-6e11-4594-882a-73f59723d4c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.297292] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1370.297292] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522bf39e-d91e-bcbb-7426-1505f13d4a46" [ 1370.297292] env[62619]: _type = "Task" [ 1370.297292] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.301992] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Waiting for the task: (returnval){ [ 1370.301992] env[62619]: value = "task-1777162" [ 1370.301992] env[62619]: _type = "Task" [ 1370.301992] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.309069] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522bf39e-d91e-bcbb-7426-1505f13d4a46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.315482] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777162, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.440412] env[62619]: DEBUG oslo_vmware.api [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Task: {'id': task-1777161, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253779} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.442897] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1370.443845] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1370.444346] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1370.444432] env[62619]: INFO nova.compute.manager [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1370.444713] env[62619]: DEBUG oslo.service.loopingcall [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1370.444934] env[62619]: DEBUG nova.compute.manager [-] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1370.445035] env[62619]: DEBUG nova.network.neutron [-] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1370.449933] env[62619]: DEBUG nova.compute.manager [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1370.450391] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1370.451176] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af941a17-94da-4c13-88ca-f84853322cfd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.459854] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1370.460166] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb768811-97ce-45df-a208-17f7ce20e11d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.467267] env[62619]: DEBUG oslo_vmware.api [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Waiting for the task: (returnval){ [ 1370.467267] env[62619]: value = "task-1777163" [ 1370.467267] env[62619]: _type = "Task" [ 1370.467267] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.478779] env[62619]: DEBUG oslo_vmware.api [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777163, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.491037] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777159, 'name': CreateVM_Task, 'duration_secs': 0.937728} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.491037] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1370.491468] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1370.581353] env[62619]: DEBUG oslo_concurrency.lockutils [req-95db2603-c6ef-4e71-b453-009d9694fa38 req-5bdae9e6-880a-4c0c-b1de-e3aa1e1d8df7 service nova] Releasing lock "refresh_cache-aa4906f1-e801-4df0-819e-8c5fb5930fb5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.596838] env[62619]: DEBUG nova.compute.manager [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1370.667519] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.736911] env[62619]: DEBUG nova.compute.manager [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1370.737174] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1370.738423] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569df7cf-8924-4e96-aae7-92a8ce02e79a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.750023] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1370.750023] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f653f34-2ea4-4342-9f51-6ad98b8793d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.759149] env[62619]: DEBUG oslo_vmware.api [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Waiting for the task: (returnval){ [ 1370.759149] env[62619]: value = "task-1777164" [ 1370.759149] env[62619]: _type = "Task" [ 1370.759149] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.770442] env[62619]: DEBUG oslo_vmware.api [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777164, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.829109] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777162, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.837351] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522bf39e-d91e-bcbb-7426-1505f13d4a46, 'name': SearchDatastore_Task, 'duration_secs': 0.013247} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.838720] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.838720] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1370.838720] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1370.839043] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.839200] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1370.839563] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.839986] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1370.840249] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3be2d5ac-1a6c-4eb2-9a13-4d4e8ecd84b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.842640] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69797c23-9c86-4d30-95ab-6111c190b6d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.849413] env[62619]: DEBUG oslo_vmware.api [None 
req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Waiting for the task: (returnval){ [ 1370.849413] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5221d39d-81a2-190b-65fa-9049fd2a36c3" [ 1370.849413] env[62619]: _type = "Task" [ 1370.849413] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.856145] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1370.856379] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1370.860525] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15aacf34-6ae7-4f03-9cfb-b3e5061b51f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.868124] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5221d39d-81a2-190b-65fa-9049fd2a36c3, 'name': SearchDatastore_Task, 'duration_secs': 0.010855} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.868837] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.869567] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1370.869567] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1370.872566] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1370.872566] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52def38e-b5e6-16f0-3e61-2cc59a9aa90f" [ 1370.872566] env[62619]: _type = "Task" [ 1370.872566] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.880658] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52def38e-b5e6-16f0-3e61-2cc59a9aa90f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.988026] env[62619]: DEBUG oslo_vmware.api [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777163, 'name': PowerOffVM_Task, 'duration_secs': 0.254329} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.991919] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1370.991919] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1370.992126] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a81ec89-4957-49cf-859a-9e9b89c4c462 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.075464] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1371.075626] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1371.079088] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Deleting the datastore file [datastore1] b334cb41-5ddf-4545-8e2a-97c4d1de7cbf {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1371.079570] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df8b1082-76fd-4235-a5e0-1ef8767d111b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.090378] env[62619]: DEBUG oslo_vmware.api [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Waiting for the task: (returnval){ [ 1371.090378] env[62619]: value = "task-1777166" [ 1371.090378] env[62619]: _type = "Task" [ 1371.090378] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.101898] env[62619]: DEBUG oslo_vmware.api [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777166, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.124826] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3dee079-ceba-4818-a0dc-a91f25807c22 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.141080] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe599ea-6044-43d4-8729-1922d7ddffce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.184528] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dbb2660-ba04-45d5-9031-4a3d3efaead4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.196842] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098e7ec5-46d8-4441-adf2-52d1a1ba757b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.219028] env[62619]: DEBUG nova.compute.provider_tree [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1371.271120] env[62619]: DEBUG oslo_vmware.api [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777164, 'name': PowerOffVM_Task, 'duration_secs': 0.267794} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.271389] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1371.271560] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1371.272202] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1587e21-93a3-451b-9aed-3edd94c8ce85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.323015] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777162, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565561} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.325232] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 1847c5d8-16eb-4feb-8a09-24ad6728e59c/1847c5d8-16eb-4feb-8a09-24ad6728e59c.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1371.325232] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1371.325232] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-795febbc-ce2e-489b-b211-2eecdce1adc8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.331546] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Waiting for the task: (returnval){ [ 1371.331546] env[62619]: value = "task-1777168" [ 1371.331546] env[62619]: _type = "Task" [ 1371.331546] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.340604] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777168, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.353685] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1371.354611] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1371.354789] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Deleting the datastore file [datastore1] e0e25ddd-3692-480f-bfa0-212741c0d882 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1371.355124] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01bba5d9-40cf-4f79-8810-39f1e87ea76d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.358236] env[62619]: DEBUG nova.network.neutron [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Successfully created port: 9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1371.366224] env[62619]: DEBUG oslo_vmware.api [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Waiting for the task: (returnval){ [ 1371.366224] env[62619]: value = "task-1777169" [ 1371.366224] env[62619]: _type = "Task" [ 1371.366224] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.385136] env[62619]: DEBUG oslo_vmware.api [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777169, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.386907] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52def38e-b5e6-16f0-3e61-2cc59a9aa90f, 'name': SearchDatastore_Task, 'duration_secs': 0.010947} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.388114] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f6526b1-535b-4896-8080-0afae1fa5fce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.394731] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1371.394731] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e36b27-6825-8ac8-5bfa-aabf5ccf63e3" [ 1371.394731] env[62619]: _type = "Task" [ 1371.394731] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.405575] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e36b27-6825-8ac8-5bfa-aabf5ccf63e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.607429] env[62619]: DEBUG oslo_vmware.api [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Task: {'id': task-1777166, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154278} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.607429] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1371.607429] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1371.607429] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1371.607429] env[62619]: INFO nova.compute.manager [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1371.608167] env[62619]: DEBUG oslo.service.loopingcall [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1371.608167] env[62619]: DEBUG nova.compute.manager [-] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1371.608167] env[62619]: DEBUG nova.network.neutron [-] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1371.619052] env[62619]: DEBUG nova.compute.manager [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1371.665061] env[62619]: DEBUG nova.virt.hardware [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:48:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='511523480',id=24,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1771445938',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1371.665329] env[62619]: DEBUG nova.virt.hardware [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1371.665483] env[62619]: DEBUG nova.virt.hardware [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1371.665657] env[62619]: DEBUG nova.virt.hardware [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1371.665795] env[62619]: DEBUG nova.virt.hardware [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1371.665932] env[62619]: DEBUG nova.virt.hardware [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 
tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1371.666149] env[62619]: DEBUG nova.virt.hardware [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1371.666300] env[62619]: DEBUG nova.virt.hardware [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1371.666460] env[62619]: DEBUG nova.virt.hardware [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1371.666614] env[62619]: DEBUG nova.virt.hardware [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1371.666776] env[62619]: DEBUG nova.virt.hardware [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1371.667707] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cdae152-b21c-45ee-9a5c-423b87f8af07 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.676531] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "6dbe4133-a6ba-4bba-9eb9-47a3d2691eec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.676726] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "6dbe4133-a6ba-4bba-9eb9-47a3d2691eec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.680680] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc69c034-0ad7-4954-be6d-20b83c5f6ddc {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.723219] env[62619]: DEBUG nova.scheduler.client.report [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1371.783661] env[62619]: DEBUG nova.compute.manager [req-6cece8aa-3e30-4264-aa59-18c1ff510738 req-bdd4f3fa-ab05-4b9d-a17c-1a6532ff648a service nova] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Received event network-changed-3af5cb8d-ea8b-4677-920d-5e06ecc2843b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1371.783955] env[62619]: DEBUG nova.compute.manager [req-6cece8aa-3e30-4264-aa59-18c1ff510738 req-bdd4f3fa-ab05-4b9d-a17c-1a6532ff648a service nova] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Refreshing instance network info cache due to event network-changed-3af5cb8d-ea8b-4677-920d-5e06ecc2843b. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1371.785242] env[62619]: DEBUG oslo_concurrency.lockutils [req-6cece8aa-3e30-4264-aa59-18c1ff510738 req-bdd4f3fa-ab05-4b9d-a17c-1a6532ff648a service nova] Acquiring lock "refresh_cache-ed34ae20-a891-45aa-8124-f36f264937f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.785242] env[62619]: DEBUG oslo_concurrency.lockutils [req-6cece8aa-3e30-4264-aa59-18c1ff510738 req-bdd4f3fa-ab05-4b9d-a17c-1a6532ff648a service nova] Acquired lock "refresh_cache-ed34ae20-a891-45aa-8124-f36f264937f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.785242] env[62619]: DEBUG nova.network.neutron [req-6cece8aa-3e30-4264-aa59-18c1ff510738 req-bdd4f3fa-ab05-4b9d-a17c-1a6532ff648a service nova] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Refreshing network info cache for port 3af5cb8d-ea8b-4677-920d-5e06ecc2843b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1371.846431] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777168, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083131} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.846710] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1371.847517] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319ead62-6662-4d6d-8100-c8a35ab9d084 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.878196] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 1847c5d8-16eb-4feb-8a09-24ad6728e59c/1847c5d8-16eb-4feb-8a09-24ad6728e59c.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1371.879076] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-074764ce-f270-4810-a859-85c3e5473b0f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.898209] env[62619]: DEBUG nova.network.neutron [-] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1371.905793] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Waiting for the task: (returnval){ [ 1371.905793] env[62619]: value = "task-1777170" [ 1371.905793] env[62619]: _type = "Task" [ 1371.905793] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.909280] env[62619]: DEBUG oslo_vmware.api [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Task: {'id': task-1777169, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156234} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.917082] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1371.917082] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1371.917082] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1371.917082] env[62619]: INFO nova.compute.manager [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1371.917353] env[62619]: DEBUG oslo.service.loopingcall [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1371.917589] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e36b27-6825-8ac8-5bfa-aabf5ccf63e3, 'name': SearchDatastore_Task, 'duration_secs': 0.010537} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.918896] env[62619]: DEBUG nova.compute.manager [-] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1371.918896] env[62619]: DEBUG nova.network.neutron [-] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1371.920584] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1371.922852] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] aa4906f1-e801-4df0-819e-8c5fb5930fb5/aa4906f1-e801-4df0-819e-8c5fb5930fb5.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1371.922852] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.922852] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1371.922852] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-136a8bde-5e49-43f4-a247-978d7440f16b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.929638] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60d616a3-64f2-4810-8b0f-1d1ebb3d83ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.931673] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777170, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.936770] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1371.936770] env[62619]: value = "task-1777171" [ 1371.936770] env[62619]: _type = "Task" [ 1371.936770] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.945874] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1371.946129] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1371.948058] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae600919-c92b-4239-98e5-aabb18bdcd98 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.953734] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777171, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.957444] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Waiting for the task: (returnval){ [ 1371.957444] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520964ff-6a96-06f1-21f8-46634a5acbbd" [ 1371.957444] env[62619]: _type = "Task" [ 1371.957444] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.967435] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520964ff-6a96-06f1-21f8-46634a5acbbd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.987887] env[62619]: DEBUG nova.compute.manager [req-c3906cf5-4d7e-4e84-9c07-de206b322b2b req-ffa0e50b-570a-4fad-aa4c-8c0ec9bb0866 service nova] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Received event network-vif-deleted-0c0de74a-465b-4ba6-bc5c-860a32c90ff1 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1372.231305] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.652s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1372.231305] env[62619]: DEBUG nova.compute.manager [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1372.236020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.590s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1372.236020] env[62619]: INFO nova.compute.claims [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1372.404830] env[62619]: INFO nova.compute.manager [-] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Took 1.96 seconds to deallocate network for instance. [ 1372.421546] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777170, 'name': ReconfigVM_Task, 'duration_secs': 0.500101} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.421797] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 1847c5d8-16eb-4feb-8a09-24ad6728e59c/1847c5d8-16eb-4feb-8a09-24ad6728e59c.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1372.422638] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45ccfdce-6c04-45cf-b8f9-2b64b2b678a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.430987] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Waiting for the task: (returnval){ [ 1372.430987] env[62619]: value = "task-1777172" [ 1372.430987] env[62619]: _type = "Task" [ 1372.430987] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.442936] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777172, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.453213] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777171, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.468583] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520964ff-6a96-06f1-21f8-46634a5acbbd, 'name': SearchDatastore_Task, 'duration_secs': 0.016983} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.469583] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd4625b7-5f50-4263-8ba2-8bb04dbf6bb5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.475870] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Waiting for the task: (returnval){ [ 1372.475870] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524d277f-0499-f1b2-16c6-aecc78b734d0" [ 1372.475870] env[62619]: _type = "Task" [ 1372.475870] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.485364] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524d277f-0499-f1b2-16c6-aecc78b734d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.740995] env[62619]: DEBUG nova.compute.utils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1372.745678] env[62619]: DEBUG nova.compute.manager [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1372.745678] env[62619]: DEBUG nova.network.neutron [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1372.917013] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1372.929415] env[62619]: DEBUG nova.policy [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a642f85f0187484480d998009032fb1d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a12b52b8da24c44806817fff5661ff0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1372.946693] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777172, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.961369] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777171, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.653852} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.962206] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] aa4906f1-e801-4df0-819e-8c5fb5930fb5/aa4906f1-e801-4df0-819e-8c5fb5930fb5.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1372.962504] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1372.962802] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-137a570b-c4c7-4ac7-8b95-6ab43259f6e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.976313] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1372.976313] env[62619]: value = "task-1777173" [ 1372.976313] env[62619]: _type = "Task" [ 1372.976313] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.992440] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777173, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.996544] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524d277f-0499-f1b2-16c6-aecc78b734d0, 'name': SearchDatastore_Task, 'duration_secs': 0.048961} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.996813] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.997156] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ed34ae20-a891-45aa-8124-f36f264937f8/ed34ae20-a891-45aa-8124-f36f264937f8.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1372.997505] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-631454bc-30b5-46f8-b191-f23f5068e337 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.006349] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Waiting for the task: (returnval){ [ 1373.006349] env[62619]: value = "task-1777174" [ 1373.006349] env[62619]: _type = "Task" [ 1373.006349] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.016918] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': task-1777174, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.255806] env[62619]: DEBUG nova.compute.manager [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1373.310571] env[62619]: DEBUG nova.network.neutron [req-6cece8aa-3e30-4264-aa59-18c1ff510738 req-bdd4f3fa-ab05-4b9d-a17c-1a6532ff648a service nova] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Updated VIF entry in instance network info cache for port 3af5cb8d-ea8b-4677-920d-5e06ecc2843b. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1373.311509] env[62619]: DEBUG nova.network.neutron [req-6cece8aa-3e30-4264-aa59-18c1ff510738 req-bdd4f3fa-ab05-4b9d-a17c-1a6532ff648a service nova] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Updating instance_info_cache with network_info: [{"id": "3af5cb8d-ea8b-4677-920d-5e06ecc2843b", "address": "fa:16:3e:e3:16:0b", "network": {"id": "73bdc485-118f-4877-afde-edd7e6119b94", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1994939706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9a96018b71f4e6db335ff7deeb6c4b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3af5cb8d-ea", "ovs_interfaceid": "3af5cb8d-ea8b-4677-920d-5e06ecc2843b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.325025] env[62619]: DEBUG nova.network.neutron [-] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.445105] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "80363e16-5dd2-42ad-9ead-25b121d62211" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1373.445378] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "80363e16-5dd2-42ad-9ead-25b121d62211" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1373.462467] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777172, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.488481] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777173, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074588} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.490028] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1373.491153] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46779f8a-b976-4900-a73b-0c16191a7310 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.520614] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] aa4906f1-e801-4df0-819e-8c5fb5930fb5/aa4906f1-e801-4df0-819e-8c5fb5930fb5.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1373.530913] env[62619]: DEBUG nova.network.neutron [-] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.532069] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5111420a-e8bb-41ea-8706-3802b142243a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.552545] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': task-1777174, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477384} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.554662] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ed34ae20-a891-45aa-8124-f36f264937f8/ed34ae20-a891-45aa-8124-f36f264937f8.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1373.554662] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1373.554662] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1373.554662] env[62619]: value = "task-1777175" [ 1373.554662] env[62619]: _type = "Task" [ 1373.554662] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.555679] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a05a9c5d-404b-46d0-bfce-6c6f62a6c65b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.566848] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Waiting for the task: (returnval){ [ 1373.566848] env[62619]: value = "task-1777176" [ 1373.566848] env[62619]: _type = "Task" [ 1373.566848] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.570630] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777175, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.578929] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': task-1777176, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.818524] env[62619]: DEBUG oslo_concurrency.lockutils [req-6cece8aa-3e30-4264-aa59-18c1ff510738 req-bdd4f3fa-ab05-4b9d-a17c-1a6532ff648a service nova] Releasing lock "refresh_cache-ed34ae20-a891-45aa-8124-f36f264937f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.827870] env[62619]: INFO nova.compute.manager [-] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Took 2.22 seconds to deallocate network for instance. 
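Note on the task entries above: each VirtualDiskManager / VirtualMachine call (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task) follows the same lifecycle in this log: a "Waiting for the task" line with the task handle, periodic "progress is N%" polls, and a final "completed successfully" line carrying duration_secs. The sketch below reproduces that polling pattern in plain Python as an illustration only; it is not the oslo_vmware implementation, and FakeTask with its step list is a made-up placeholder.

    import time

    class FakeTask:
        """Stand-in for a vCenter task handle such as task-1777171."""
        def __init__(self, steps):
            self._progress = iter(steps)

        def poll(self):
            # Return (state, progress_percent) for the task.
            try:
                return "running", next(self._progress)
            except StopIteration:
                return "success", 100

    def wait_for_task(task, poll_interval=0.5):
        """Poll until completion, mirroring the 'progress is N%' ...
        'completed successfully' (duration_secs) pattern in the log."""
        started = time.monotonic()
        while True:
            state, progress = task.poll()
            if state == "success":
                duration = time.monotonic() - started
                print("completed successfully in %.6fs" % duration)
                return
            print("progress is %d%%" % progress)
            time.sleep(poll_interval)

    if __name__ == "__main__":
        wait_for_task(FakeTask([0, 25, 88]), poll_interval=0.01)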
[ 1373.849093] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0043d1-58ae-44f6-94bc-f048a1d82269 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.857997] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70fcfe8-f971-4a31-a6dd-54464091ff12 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.897146] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6394bb7a-beed-4128-8e5e-b162247e5d86 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.907377] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970a42a3-9f2e-40ca-a48e-af48e624ab84 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.923700] env[62619]: DEBUG nova.compute.provider_tree [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1373.948905] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777172, 'name': Rename_Task, 'duration_secs': 1.1152} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.949581] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1373.949581] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19ecb745-e14b-42f5-b3eb-da32d085867c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.956252] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Waiting for the task: (returnval){ [ 1373.956252] env[62619]: value = "task-1777177" [ 1373.956252] env[62619]: _type = "Task" [ 1373.956252] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.965363] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777177, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.047944] env[62619]: INFO nova.compute.manager [-] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Took 2.13 seconds to deallocate network for instance. 
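The spawn path traced above copies the cached base image (devstack-image-cache_base/27a858d5-....vmdk) into the instance directory and then extends the root virtual disk to 1048576. A small sketch of where that number plausibly comes from, assuming it is the new capacity in KiB derived from a 1 GB root disk; the helper below is hypothetical, not driver code.

    KIB_PER_GIB = 1024 * 1024

    def root_disk_capacity_kib(root_gb):
        """Convert a flavor's root_gb into the KiB figure seen in the extend call."""
        return root_gb * KIB_PER_GIB

    # 1 GB root disk -> the 1048576 logged by ExtendVirtualDisk_Task above.
    assert root_disk_capacity_kib(1) == 1048576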
[ 1374.071175] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777175, 'name': ReconfigVM_Task, 'duration_secs': 0.295714} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.074293] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Reconfigured VM instance instance-0000000a to attach disk [datastore1] aa4906f1-e801-4df0-819e-8c5fb5930fb5/aa4906f1-e801-4df0-819e-8c5fb5930fb5.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1374.075290] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f9f243d-7c3d-4074-a078-0120a5c6fea5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.081737] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': task-1777176, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074534} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.083092] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1374.083445] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1374.083445] env[62619]: value = "task-1777178" [ 1374.083445] env[62619]: _type = "Task" [ 1374.083445] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.084140] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37e822c-c662-4a68-b9e6-c1eaf05584b9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.094742] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777178, 'name': Rename_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.113238] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] ed34ae20-a891-45aa-8124-f36f264937f8/ed34ae20-a891-45aa-8124-f36f264937f8.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1374.114391] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2747f798-a54f-4abd-bf1a-cb8a838788db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.134649] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Waiting for the task: (returnval){ [ 1374.134649] env[62619]: value = "task-1777179" [ 1374.134649] env[62619]: _type = "Task" [ 1374.134649] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.144878] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': task-1777179, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.280075] env[62619]: DEBUG nova.compute.manager [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1374.317408] env[62619]: DEBUG nova.virt.hardware [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1374.317966] env[62619]: DEBUG nova.virt.hardware [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1374.318235] env[62619]: DEBUG nova.virt.hardware [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1374.318471] env[62619]: DEBUG nova.virt.hardware [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1374.318699] env[62619]: DEBUG nova.virt.hardware [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1374.318912] env[62619]: DEBUG nova.virt.hardware [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1374.319215] env[62619]: DEBUG nova.virt.hardware [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1374.319486] env[62619]: DEBUG nova.virt.hardware [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1374.319721] env[62619]: DEBUG nova.virt.hardware [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1374.320038] env[62619]: DEBUG nova.virt.hardware [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1374.320739] env[62619]: DEBUG nova.virt.hardware [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1374.321625] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a7c298-0e0e-401a-90d8-1075c14f56b0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.331097] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2462a268-5e41-4d45-a7f8-0e5ad6a8e098 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.340436] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.428721] env[62619]: DEBUG nova.scheduler.client.report [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1374.473521] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777177, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.559312] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.603016] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777178, 'name': Rename_Task, 'duration_secs': 0.137176} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.603379] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1374.603658] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8fbc345-d11c-4bca-991b-6d8cddbf2b02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.646493] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': task-1777179, 'name': ReconfigVM_Task, 'duration_secs': 0.307463} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.650814] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Reconfigured VM instance instance-0000000b to attach disk [datastore1] ed34ae20-a891-45aa-8124-f36f264937f8/ed34ae20-a891-45aa-8124-f36f264937f8.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1374.651350] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1374.651350] env[62619]: value = "task-1777180" [ 1374.651350] env[62619]: _type = "Task" [ 1374.651350] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.651559] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e6625d6-1ef2-4bec-98d1-26b925c2612b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.673895] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777180, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.675500] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Waiting for the task: (returnval){ [ 1374.675500] env[62619]: value = "task-1777181" [ 1374.675500] env[62619]: _type = "Task" [ 1374.675500] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.687509] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': task-1777181, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.936286] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.703s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.937268] env[62619]: DEBUG nova.compute.manager [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1374.940770] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.962s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.942683] env[62619]: INFO nova.compute.claims [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1374.974144] env[62619]: DEBUG oslo_vmware.api [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777177, 'name': PowerOnVM_Task, 'duration_secs': 0.746852} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.974891] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1374.974891] env[62619]: INFO nova.compute.manager [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Took 5.94 seconds to spawn the instance on the hypervisor. [ 1374.975044] env[62619]: DEBUG nova.compute.manager [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1374.975835] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50346853-ed4c-42d5-8bfd-8843fff69a2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.160668] env[62619]: DEBUG nova.network.neutron [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Successfully created port: 2de36e51-915e-40de-9915-eb1bca9827ad {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1375.167323] env[62619]: DEBUG nova.compute.manager [req-74daa1b3-9997-4147-851b-8df2ce04f65e req-fcdc56c9-537c-4248-8aac-c951358baa0f service nova] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Received event network-vif-deleted-9f1844c8-15d7-4f5f-9ed1-366ddd63f5ff {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1375.167520] env[62619]: DEBUG nova.compute.manager [req-74daa1b3-9997-4147-851b-8df2ce04f65e req-fcdc56c9-537c-4248-8aac-c951358baa0f service nova] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Received event network-vif-deleted-e9bd11b1-25ab-47d5-a138-85ec70cd7a3b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1375.175976] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777180, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.188369] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': task-1777181, 'name': Rename_Task, 'duration_secs': 0.261425} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.188637] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1375.190018] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-af4c78b2-73e4-4114-acb2-5ccd5be4acfb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.196863] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Waiting for the task: (returnval){ [ 1375.196863] env[62619]: value = "task-1777182" [ 1375.196863] env[62619]: _type = "Task" [ 1375.196863] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.206822] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': task-1777182, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.288240] env[62619]: DEBUG nova.compute.manager [req-5017dd21-59e0-4535-a101-5ed52e15ee5a req-a156cb07-e69d-406f-87e9-c828671de99d service nova] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Received event network-vif-deleted-149b04f5-c179-4c37-9c3d-7293f5342eca {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1375.423499] env[62619]: DEBUG nova.network.neutron [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Successfully updated port: 9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1375.449075] env[62619]: DEBUG nova.compute.utils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1375.450799] env[62619]: DEBUG nova.compute.manager [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1375.451214] env[62619]: DEBUG nova.network.neutron [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1375.501592] env[62619]: INFO nova.compute.manager [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Took 22.02 seconds to build instance. [ 1375.671542] env[62619]: DEBUG oslo_vmware.api [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777180, 'name': PowerOnVM_Task, 'duration_secs': 0.626759} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.671542] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1375.671853] env[62619]: INFO nova.compute.manager [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Took 11.70 seconds to spawn the instance on the hypervisor. [ 1375.672106] env[62619]: DEBUG nova.compute.manager [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1375.673185] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd52f41-d1cc-4406-a2ae-622a13b98ae5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.691164] env[62619]: DEBUG nova.policy [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '697fc164fa36494a852a1b06147e87b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '637829eb9a594ecc9e6103612182ca30', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1375.708412] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': task-1777182, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.925958] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "refresh_cache-fb231b38-950e-4c86-bfe5-4c10a304910f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1375.926132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquired lock "refresh_cache-fb231b38-950e-4c86-bfe5-4c10a304910f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1375.926295] env[62619]: DEBUG nova.network.neutron [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1375.951642] env[62619]: DEBUG nova.compute.manager [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1376.005811] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f0fbd1ad-f040-4eb7-b7e8-ff1e2b9c0071 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Lock "1847c5d8-16eb-4feb-8a09-24ad6728e59c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.717s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.214183] env[62619]: INFO nova.compute.manager [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Took 27.48 seconds to build instance. [ 1376.226406] env[62619]: DEBUG oslo_vmware.api [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': task-1777182, 'name': PowerOnVM_Task, 'duration_secs': 0.974443} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.229830] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1376.230846] env[62619]: INFO nova.compute.manager [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Took 9.76 seconds to spawn the instance on the hypervisor. 
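The power-on sequence above follows oslo.vmware's generic invoke-and-wait pattern: an asynchronous vSphere task (here PowerOnVM_Task) is created, and wait_for_task() polls its progress until it completes, which is what the repeated _poll_task entries record. A minimal standalone sketch of that pattern follows; the vCenter host, credentials and VM selection are placeholders, and it illustrates the oslo.vmware library usage rather than the exact nova.virt.vmwareapi call sites.

```python
# Sketch of the oslo.vmware invoke-and-wait pattern visible in the log
# (PowerOnVM_Task followed by task polling). Host, credentials and the
# VM chosen below are placeholders, not values from this deployment.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vc.example.test',             # vCenter host (placeholder)
    'administrator@vsphere.test',  # username (placeholder)
    'secret',                      # password (placeholder)
    api_retry_count=10,
    task_poll_interval=0.5)

# Retrieve up to 100 VirtualMachine managed object references via the
# property collector (the RetrievePropertiesEx calls seen in the log).
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100)
vm_ref = result.objects[0].obj  # pick a VM; index 0 is arbitrary here

# Start the asynchronous power-on task and block until it finishes;
# wait_for_task() polls the Task object just like _poll_task above.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)
```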
[ 1376.232243] env[62619]: DEBUG nova.compute.manager [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1376.232802] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37b3110-3548-4636-ba9b-adb627b18e68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.485480] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c66e1dc-fe1a-41d8-a9a7-9522d72e6a71 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.493167] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fffd7c37-996b-4d36-8d9d-fea133f80a47 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.534117] env[62619]: DEBUG nova.compute.manager [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1376.537926] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14adc7d9-c4de-4eb4-95d0-f9b799257599 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.546555] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9691af2e-5175-43e5-af3f-013fba41ac8b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.561622] env[62619]: DEBUG nova.compute.provider_tree [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1376.563491] env[62619]: DEBUG nova.network.neutron [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1376.718103] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39fef01d-cd79-49d5-9ae0-18bf720c5760 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.004s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.761366] env[62619]: INFO nova.compute.manager [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Took 24.39 seconds to build instance. [ 1376.805327] env[62619]: DEBUG oslo_vmware.rw_handles [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bfeced-0ab9-3e23-0e46-827a13fe9683/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1376.806192] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a248eff1-8fac-49c4-89c1-e66062959b9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.813735] env[62619]: DEBUG oslo_vmware.rw_handles [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bfeced-0ab9-3e23-0e46-827a13fe9683/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1376.813932] env[62619]: ERROR oslo_vmware.rw_handles [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bfeced-0ab9-3e23-0e46-827a13fe9683/disk-0.vmdk due to incomplete transfer. [ 1376.815591] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-eccb8298-6699-47d8-af9b-dcc8b91865d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.823477] env[62619]: DEBUG oslo_vmware.rw_handles [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bfeced-0ab9-3e23-0e46-827a13fe9683/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1376.823747] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Uploaded image 6e31803f-a937-40b2-93d4-307e624edb77 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1376.825806] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1376.825989] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0fb31da0-dbf5-49e0-8968-c327c3f53190 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.834581] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1376.834581] env[62619]: value = "task-1777183" [ 1376.834581] env[62619]: _type = "Task" [ 1376.834581] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.844977] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777183, 'name': Destroy_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.864481] env[62619]: DEBUG nova.network.neutron [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Successfully created port: cfe84dde-a706-4b94-9382-35d5056a97d2 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1376.971278] env[62619]: DEBUG nova.compute.manager [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1377.007409] env[62619]: DEBUG nova.virt.hardware [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1377.007973] env[62619]: DEBUG nova.virt.hardware [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1377.007973] env[62619]: DEBUG nova.virt.hardware [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1377.008377] env[62619]: DEBUG nova.virt.hardware [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1377.008377] env[62619]: DEBUG nova.virt.hardware [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1377.008512] env[62619]: DEBUG nova.virt.hardware [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1377.008703] env[62619]: DEBUG nova.virt.hardware [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1377.008894] env[62619]: DEBUG nova.virt.hardware [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1377.009701] env[62619]: DEBUG 
nova.virt.hardware [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1377.011521] env[62619]: DEBUG nova.virt.hardware [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1377.011521] env[62619]: DEBUG nova.virt.hardware [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1377.011521] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92a73df-98fa-48ea-aa52-8eddd820d2c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.024534] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8341f308-e6a3-4010-988b-e81a344e94e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.044859] env[62619]: DEBUG nova.network.neutron [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Updating instance_info_cache with network_info: [{"id": "9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777", "address": "fa:16:3e:ac:9c:44", "network": {"id": "712c7a9c-8039-44f9-91d1-27991eef432a", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-366397882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7dea13f34f140dd98291849f66720ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e3b6fd0-aa", "ovs_interfaceid": "9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.067749] env[62619]: DEBUG nova.scheduler.client.report [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1377.082704] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.116954] env[62619]: DEBUG nova.compute.manager [None req-13c25243-b204-48e4-a26e-177def9a9e78 tempest-ServerDiagnosticsV248Test-645052477 tempest-ServerDiagnosticsV248Test-645052477-project-admin] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1377.118934] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f7906c-f843-4269-a0ad-3d3f576bdb19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.127581] env[62619]: INFO nova.compute.manager [None req-13c25243-b204-48e4-a26e-177def9a9e78 tempest-ServerDiagnosticsV248Test-645052477 tempest-ServerDiagnosticsV248Test-645052477-project-admin] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Retrieving diagnostics [ 1377.128639] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c227f085-2317-4e0b-b8d9-6df128eab046 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.221012] env[62619]: DEBUG nova.compute.manager [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1377.264056] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75b1be0d-3632-4205-b870-3e3e1c0cd211 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Lock "ed34ae20-a891-45aa-8124-f36f264937f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.689s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.347887] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777183, 'name': Destroy_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.530715] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Acquiring lock "7c058337-1684-4553-8e96-dd2cd1814a15" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.530891] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Lock "7c058337-1684-4553-8e96-dd2cd1814a15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.550761] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Releasing lock "refresh_cache-fb231b38-950e-4c86-bfe5-4c10a304910f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1377.551217] env[62619]: DEBUG nova.compute.manager [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Instance network_info: |[{"id": "9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777", "address": "fa:16:3e:ac:9c:44", "network": {"id": "712c7a9c-8039-44f9-91d1-27991eef432a", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-366397882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7dea13f34f140dd98291849f66720ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e3b6fd0-aa", "ovs_interfaceid": "9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1377.551576] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:9c:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '02092ea4-bae0-4e42-b0ab-abc365b4395a', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1377.560442] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Creating folder: Project (b7dea13f34f140dd98291849f66720ad). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1377.562291] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eca1d0b4-d373-45e6-97f3-1fce29af3e53 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.575241] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.575241] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1377.581473] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.354s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.583350] env[62619]: INFO nova.compute.claims [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1377.585997] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Created folder: Project (b7dea13f34f140dd98291849f66720ad) in parent group-v368875. [ 1377.586450] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Creating folder: Instances. Parent ref: group-v368912. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1377.586879] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1e2e3f1-e946-4555-910f-75d29f95c8dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.600450] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Created folder: Instances in parent group-v368912. [ 1377.601161] env[62619]: DEBUG oslo.service.loopingcall [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1377.601161] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1377.601161] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-911f5910-3590-45a2-93aa-a124dd6c3f2b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.625804] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1377.625804] env[62619]: value = "task-1777186" [ 1377.625804] env[62619]: _type = "Task" [ 1377.625804] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.642197] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777186, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.764341] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.768857] env[62619]: DEBUG nova.compute.manager [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1377.849141] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777183, 'name': Destroy_Task, 'duration_secs': 0.532424} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.849141] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Destroyed the VM [ 1377.849141] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1377.849141] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-087ab728-9391-4380-8633-9108e2e03665 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.858955] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1377.858955] env[62619]: value = "task-1777187" [ 1377.858955] env[62619]: _type = "Task" [ 1377.858955] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.869999] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777187, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.091333] env[62619]: DEBUG nova.compute.utils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1378.097816] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1378.097816] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1378.138321] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777186, 'name': CreateVM_Task, 'duration_secs': 0.422203} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.138553] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1378.140029] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.140029] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.140029] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1378.140252] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5a2ef4d-6055-4c54-9e10-4d7f84d9cfbb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.146270] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1378.146270] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52370bf2-a4ed-6481-2bd6-e87cbc80e6cc" [ 1378.146270] env[62619]: _type = "Task" [ 1378.146270] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.160946] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52370bf2-a4ed-6481-2bd6-e87cbc80e6cc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.193995] env[62619]: DEBUG nova.policy [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f2bd9597b434c1d8f861968794a9070', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f397d7b5794041daad38e58b63d3e8ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1378.300050] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.375648] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777187, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.416873] env[62619]: DEBUG nova.network.neutron [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Successfully updated port: 2de36e51-915e-40de-9915-eb1bca9827ad {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1378.600879] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1378.677183] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52370bf2-a4ed-6481-2bd6-e87cbc80e6cc, 'name': SearchDatastore_Task, 'duration_secs': 0.013931} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.682399] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.682971] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1378.683293] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.683499] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.684846] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1378.686401] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-307ce437-e6a3-48be-8af3-e16562194290 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.700318] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1378.700318] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1378.704212] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f81c220-5b2b-48a3-9929-72b1281f526b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.710708] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1378.710708] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52967c37-7cd2-772e-8b52-cd9f15a123fe" [ 1378.710708] env[62619]: _type = "Task" [ 1378.710708] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.725919] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52967c37-7cd2-772e-8b52-cd9f15a123fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.836626] env[62619]: DEBUG nova.compute.manager [req-dc5c172a-5fe8-4869-b16d-e68270993291 req-6c74a15e-b2d2-4120-992f-56abb6401587 service nova] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Received event network-vif-plugged-9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1378.836881] env[62619]: DEBUG oslo_concurrency.lockutils [req-dc5c172a-5fe8-4869-b16d-e68270993291 req-6c74a15e-b2d2-4120-992f-56abb6401587 service nova] Acquiring lock "fb231b38-950e-4c86-bfe5-4c10a304910f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.837118] env[62619]: DEBUG oslo_concurrency.lockutils [req-dc5c172a-5fe8-4869-b16d-e68270993291 req-6c74a15e-b2d2-4120-992f-56abb6401587 service nova] Lock "fb231b38-950e-4c86-bfe5-4c10a304910f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.837292] env[62619]: DEBUG oslo_concurrency.lockutils [req-dc5c172a-5fe8-4869-b16d-e68270993291 req-6c74a15e-b2d2-4120-992f-56abb6401587 service nova] Lock "fb231b38-950e-4c86-bfe5-4c10a304910f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.837487] env[62619]: DEBUG nova.compute.manager [req-dc5c172a-5fe8-4869-b16d-e68270993291 req-6c74a15e-b2d2-4120-992f-56abb6401587 service nova] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] No waiting events found dispatching network-vif-plugged-9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1378.837669] env[62619]: WARNING nova.compute.manager [req-dc5c172a-5fe8-4869-b16d-e68270993291 req-6c74a15e-b2d2-4120-992f-56abb6401587 service nova] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Received unexpected 
event network-vif-plugged-9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777 for instance with vm_state building and task_state spawning. [ 1378.837834] env[62619]: DEBUG nova.compute.manager [req-dc5c172a-5fe8-4869-b16d-e68270993291 req-6c74a15e-b2d2-4120-992f-56abb6401587 service nova] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Received event network-changed-9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1378.837978] env[62619]: DEBUG nova.compute.manager [req-dc5c172a-5fe8-4869-b16d-e68270993291 req-6c74a15e-b2d2-4120-992f-56abb6401587 service nova] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Refreshing instance network info cache due to event network-changed-9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1378.838205] env[62619]: DEBUG oslo_concurrency.lockutils [req-dc5c172a-5fe8-4869-b16d-e68270993291 req-6c74a15e-b2d2-4120-992f-56abb6401587 service nova] Acquiring lock "refresh_cache-fb231b38-950e-4c86-bfe5-4c10a304910f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.838338] env[62619]: DEBUG oslo_concurrency.lockutils [req-dc5c172a-5fe8-4869-b16d-e68270993291 req-6c74a15e-b2d2-4120-992f-56abb6401587 service nova] Acquired lock "refresh_cache-fb231b38-950e-4c86-bfe5-4c10a304910f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.838608] env[62619]: DEBUG nova.network.neutron [req-dc5c172a-5fe8-4869-b16d-e68270993291 req-6c74a15e-b2d2-4120-992f-56abb6401587 service nova] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Refreshing network info cache for port 9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1378.874858] env[62619]: DEBUG oslo_vmware.api [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777187, 'name': RemoveSnapshot_Task, 'duration_secs': 0.865757} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.875182] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1378.875435] env[62619]: INFO nova.compute.manager [None req-07881740-f3e0-46b0-9810-a181a17ad327 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Took 15.08 seconds to snapshot the instance on the hypervisor. 
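The lock bookkeeping lines throughout this section ("Acquiring lock ...", "acquired ... :: waited", '"released" ... :: held') are emitted by oslo.concurrency's lockutils helpers, which Nova uses both as a decorator (e.g. the "compute_resources" lock around resource claims) and as a context manager (the per-instance "refresh_cache-<uuid>" locks). A minimal sketch of both forms is below; the lock names and functions are illustrative, not the actual Nova call sites.

```python
# Sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock / acquired ... waited / released ... held" DEBUG lines.
# Lock names and function bodies are illustrative, not Nova's own code.
import time

from oslo_concurrency import lockutils

# Decorator form: callers sharing the lock name are serialized, and the
# wrapper logs how long each caller waited for and then held the lock.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    time.sleep(0.1)  # stand-in for resource-tracker bookkeeping
    return instance_uuid

# Context-manager form, as used for the per-instance cache locks such as
# "refresh_cache-<instance uuid>" in the entries above.
def refresh_network_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance's network info cache here

if __name__ == '__main__':
    claim_resources('fb231b38-950e-4c86-bfe5-4c10a304910f')
    refresh_network_cache('fb231b38-950e-4c86-bfe5-4c10a304910f')
```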
[ 1378.919296] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "refresh_cache-4ee81568-ad9a-4ded-b6fe-15503d85968e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.919515] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquired lock "refresh_cache-4ee81568-ad9a-4ded-b6fe-15503d85968e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.919734] env[62619]: DEBUG nova.network.neutron [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1379.094300] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Successfully created port: 4bd598be-9cdb-42cd-a9b0-53e9eb3622ec {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1379.154770] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3bf78f-9252-4cda-9f20-b4acd9edeaea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.165815] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf48ec3f-93b1-4eb4-9d41-18035a29b526 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.200192] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab49ea61-c66d-4b86-a9b2-1aa92acd50b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.208810] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43632a05-ae51-47e3-8353-4ebcc8da4b8f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.226061] env[62619]: DEBUG nova.compute.provider_tree [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1379.230907] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52967c37-7cd2-772e-8b52-cd9f15a123fe, 'name': SearchDatastore_Task, 'duration_secs': 0.012974} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.232007] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63688ed6-afff-40fa-b44a-1951b406bfa1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.238657] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1379.238657] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c8497d-38ad-1d68-f79e-ede699a4310f" [ 1379.238657] env[62619]: _type = "Task" [ 1379.238657] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.247928] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c8497d-38ad-1d68-f79e-ede699a4310f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.550659] env[62619]: DEBUG nova.network.neutron [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1379.614517] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1379.647525] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1379.647525] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1379.647525] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1379.647707] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1379.647707] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1379.647707] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1379.647707] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1379.647707] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1379.647933] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1379.648318] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1379.648723] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1379.649941] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b64ed4-e7e2-469a-9104-4acd6ce8caa6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.659532] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19db9387-f1c3-4720-b535-19a5d01f9ba9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.732767] env[62619]: DEBUG nova.scheduler.client.report [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1379.749287] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c8497d-38ad-1d68-f79e-ede699a4310f, 'name': SearchDatastore_Task, 'duration_secs': 0.011885} completed successfully. 
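The nova.virt.hardware lines above walk through CPU-topology selection for the m1.nano flavor: 1 vCPU, no flavor or image limits, so the 65536 defaults apply, and the result is sockets=1, cores=1, threads=1. The snippet below is a toy re-enactment of that enumeration step under those assumptions; it is illustrative only and is not Nova's _get_possible_cpu_topologies implementation.

```python
# Toy re-enactment of the topology enumeration logged above: find all
# (sockets, cores, threads) combinations whose product equals the vCPU
# count, within the 65536 default limits. Illustrative only.
from collections import namedtuple

Topology = namedtuple('Topology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    return [
        Topology(s, c, t)
        for s in range(1, min(vcpus, max_sockets) + 1)
        for c in range(1, min(vcpus, max_cores) + 1)
        for t in range(1, min(vcpus, max_threads) + 1)
        if s * c * t == vcpus
    ]

# m1.nano has vcpus=1, so the only candidate is 1:1:1, matching
# "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)]
```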
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.749989] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.749989] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] fb231b38-950e-4c86-bfe5-4c10a304910f/fb231b38-950e-4c86-bfe5-4c10a304910f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1379.750717] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cbde6261-4924-4570-8f3f-8bb02dc7a59c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.758868] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1379.758868] env[62619]: value = "task-1777188" [ 1379.758868] env[62619]: _type = "Task" [ 1379.758868] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.767932] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777188, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.924215] env[62619]: DEBUG nova.compute.manager [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Stashing vm_state: active {{(pid=62619) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1380.238305] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.657s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.238886] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1380.241534] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.462s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.241760] env[62619]: DEBUG nova.objects.instance [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Lazy-loading 'resources' on Instance uuid aa576459-65bf-4b16-ad1d-0930497522eb {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1380.271971] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777188, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.441875] env[62619]: DEBUG nova.network.neutron [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Updating instance_info_cache with network_info: [{"id": "2de36e51-915e-40de-9915-eb1bca9827ad", "address": "fa:16:3e:58:8f:8e", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2de36e51-91", "ovs_interfaceid": "2de36e51-915e-40de-9915-eb1bca9827ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.464194] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.507927] env[62619]: DEBUG nova.network.neutron [req-dc5c172a-5fe8-4869-b16d-e68270993291 req-6c74a15e-b2d2-4120-992f-56abb6401587 service nova] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Updated VIF entry in instance network info cache for port 9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777. 
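The 'Lock "compute_resources" acquired ... waited 17.462s' and '... "released" ... held 2.657s' entries above come from oslo.concurrency's lockutils wrapper, which records how long each caller waited for and held a named in-process lock. A minimal sketch of that primitive follows; the lock name is reused from the log and the function body is a stub.

```python
# Minimal sketch of the named-lock pattern behind the
# "Acquiring lock ... acquired (waited Ns) ... released (held Ns)"
# debug lines above. The lock name matches the log; the body is a stub.
import time
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Runs with the named lock held; the decorator's wrapper emits the
    # waited/held timing messages seen in the nova-compute output.
    time.sleep(0.1)
    return instance_uuid

instance_claim('312aed5b-a66e-4428-ac1b-483dc2b38291')

# The same lock can also be taken explicitly as a context manager:
with lockutils.lock('compute_resources'):
    pass  # critical section
```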
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1380.508300] env[62619]: DEBUG nova.network.neutron [req-dc5c172a-5fe8-4869-b16d-e68270993291 req-6c74a15e-b2d2-4120-992f-56abb6401587 service nova] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Updating instance_info_cache with network_info: [{"id": "9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777", "address": "fa:16:3e:ac:9c:44", "network": {"id": "712c7a9c-8039-44f9-91d1-27991eef432a", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-366397882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7dea13f34f140dd98291849f66720ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e3b6fd0-aa", "ovs_interfaceid": "9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.616246] env[62619]: DEBUG nova.network.neutron [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Successfully updated port: cfe84dde-a706-4b94-9382-35d5056a97d2 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1380.745576] env[62619]: DEBUG nova.compute.utils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1380.753115] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1380.753115] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1380.775298] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777188, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521117} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.775548] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] fb231b38-950e-4c86-bfe5-4c10a304910f/fb231b38-950e-4c86-bfe5-4c10a304910f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1380.775767] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1380.776076] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-561981af-62db-49cc-98ad-78ff3a375a80 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.787795] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1380.787795] env[62619]: value = "task-1777189" [ 1380.787795] env[62619]: _type = "Task" [ 1380.787795] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.800228] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777189, 'name': ExtendVirtualDisk_Task} progress is 0%. 
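The CopyVirtualDisk_Task and ExtendVirtualDisk_Task records above follow the usual oslo.vmware pattern: invoke a vSphere *_Task method through the API session, then block in wait_for_task while the task is polled (the repeated "progress is N%" lines). Below is a rough sketch of that pattern with placeholder credentials and paths; the exact keyword arguments accepted by VMwareAPISession can differ between oslo.vmware releases.

```python
# Hedged sketch of the invoke-then-poll task pattern visible above.
# Host, credentials and datastore paths are placeholders, not values
# taken from this deployment.
from oslo_vmware import api

session = api.VMwareAPISession('vcenter.example.test',
                               'administrator@vsphere.local',
                               'secret',
                               api_retry_count=10,
                               task_poll_interval=0.5)

src = '[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk'
dst = '[datastore1] <instance-uuid>/<instance-uuid>.vmdk'

disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                          sourceName=src, destName=dst)

# wait_for_task polls the task and logs progress, producing lines like
# "progress is 0% ... 89%" and finally "completed successfully".
task_info = session.wait_for_task(task)
print(task_info.state)
```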
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.888746] env[62619]: DEBUG nova.policy [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f2bd9597b434c1d8f861968794a9070', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f397d7b5794041daad38e58b63d3e8ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1380.944344] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Releasing lock "refresh_cache-4ee81568-ad9a-4ded-b6fe-15503d85968e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1380.944418] env[62619]: DEBUG nova.compute.manager [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Instance network_info: |[{"id": "2de36e51-915e-40de-9915-eb1bca9827ad", "address": "fa:16:3e:58:8f:8e", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2de36e51-91", "ovs_interfaceid": "2de36e51-915e-40de-9915-eb1bca9827ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1380.948897] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:8f:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2de36e51-915e-40de-9915-eb1bca9827ad', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1380.962366] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-57007abe-c28c-4d5a-b2ea-34081d426442 
tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Creating folder: Project (7a12b52b8da24c44806817fff5661ff0). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1380.965969] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a55d7f9-e333-4b6a-9b91-d7e1fb089062 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.984383] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Created folder: Project (7a12b52b8da24c44806817fff5661ff0) in parent group-v368875. [ 1380.984383] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Creating folder: Instances. Parent ref: group-v368915. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1380.984383] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db6c6249-fd20-4431-83b8-855659d7c205 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.007619] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Created folder: Instances in parent group-v368915. [ 1381.008192] env[62619]: DEBUG oslo.service.loopingcall [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1381.008192] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1381.008386] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de815ae7-aa20-4a44-8bda-d4c5d31ab601 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.029847] env[62619]: DEBUG oslo_concurrency.lockutils [req-dc5c172a-5fe8-4869-b16d-e68270993291 req-6c74a15e-b2d2-4120-992f-56abb6401587 service nova] Releasing lock "refresh_cache-fb231b38-950e-4c86-bfe5-4c10a304910f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.041334] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1381.041334] env[62619]: value = "task-1777192" [ 1381.041334] env[62619]: _type = "Task" [ 1381.041334] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.055548] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777192, 'name': CreateVM_Task} progress is 0%. 
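The "Policy check for network:attach_external_network failed" entry further up records an oslo.policy decision made with the listed credentials (member/reader roles only). A stand-alone illustration of such a check is sketched below; the 'role:admin' rule string and the target dict are assumptions for the example, not Nova's actual policy defaults.

```python
# Illustrative oslo.policy check mirroring the failed
# network:attach_external_network authorization above. The rule string
# and target are assumed for the example; they are not Nova's defaults.
from oslo_config import cfg
from oslo_policy import policy

conf = cfg.ConfigOpts()
conf([], project='oslo-policy-demo')   # initialize without a config file
enforcer = policy.Enforcer(conf)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'],
         'project_id': 'f397d7b5794041daad38e58b63d3e8ea'}
target = {'project_id': creds['project_id']}

# Returns False: the caller only holds the member/reader roles, which is
# why the log records this policy check as failed.
print(enforcer.enforce('network:attach_external_network', target, creds))
```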
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.097506] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "6be4f813-7171-4515-a728-5cf34665205a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.097791] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "6be4f813-7171-4515-a728-5cf34665205a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.121795] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Acquiring lock "refresh_cache-f46de981-1f04-4baf-874c-de1b95d16f9d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.121795] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Acquired lock "refresh_cache-f46de981-1f04-4baf-874c-de1b95d16f9d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.121795] env[62619]: DEBUG nova.network.neutron [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1381.176196] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Successfully updated port: 4bd598be-9cdb-42cd-a9b0-53e9eb3622ec {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1381.255271] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1381.277880] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de062f0b-fa85-41fd-bfe0-fa857d198415 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.287651] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716951e9-055b-4dba-b33a-d1f00da73513 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.335711] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777189, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.343204} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.336294] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1381.337230] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce95c1a-5146-4937-bbbf-226b6662c50a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.340877] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46fd27ff-ee03-4588-b00e-6043a2bf1e01 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.361396] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aacc3c6-828a-4144-917a-c849d92d7249 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.376798] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] fb231b38-950e-4c86-bfe5-4c10a304910f/fb231b38-950e-4c86-bfe5-4c10a304910f.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1381.377483] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26df9efe-b705-437c-9fde-3f517a736d20 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.404169] env[62619]: DEBUG nova.compute.provider_tree [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1381.409644] env[62619]: DEBUG oslo_vmware.api [None 
req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1381.409644] env[62619]: value = "task-1777193" [ 1381.409644] env[62619]: _type = "Task" [ 1381.409644] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.420489] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777193, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.556707] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777192, 'name': CreateVM_Task, 'duration_secs': 0.462452} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.556898] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1381.557610] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.557804] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.558125] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1381.558367] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3c98493-cbb5-48a5-abe7-645715d02bac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.567056] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1381.567056] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527d4e6a-09a0-00c7-8645-84f772b0c738" [ 1381.567056] env[62619]: _type = "Task" [ 1381.567056] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.575940] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527d4e6a-09a0-00c7-8645-84f772b0c738, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.671410] env[62619]: DEBUG nova.network.neutron [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1381.679667] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "refresh_cache-d7b2d831-b2ae-445c-887b-290171ae5d80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.679667] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquired lock "refresh_cache-d7b2d831-b2ae-445c-887b-290171ae5d80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.679667] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1381.835318] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Successfully created port: 789bc80f-8a7f-49a0-8500-7c2ea007446c {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1381.904316] env[62619]: DEBUG nova.network.neutron [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Updating instance_info_cache with network_info: [{"id": "cfe84dde-a706-4b94-9382-35d5056a97d2", "address": "fa:16:3e:82:5c:55", "network": {"id": "7dbd62d0-3a8a-4d8d-9307-e8a4b3ec0aba", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1616710183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "637829eb9a594ecc9e6103612182ca30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfe84dde-a7", "ovs_interfaceid": "cfe84dde-a706-4b94-9382-35d5056a97d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.913928] env[62619]: DEBUG nova.scheduler.client.report [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1381.932980] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777193, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.973740] env[62619]: DEBUG nova.compute.manager [req-7137ec81-4faa-456f-8088-2da31db42ced req-b4110265-d90d-49d2-8be9-e09e073d2ad0 service nova] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Received event network-vif-plugged-4bd598be-9cdb-42cd-a9b0-53e9eb3622ec {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1381.973740] env[62619]: DEBUG oslo_concurrency.lockutils [req-7137ec81-4faa-456f-8088-2da31db42ced req-b4110265-d90d-49d2-8be9-e09e073d2ad0 service nova] Acquiring lock "d7b2d831-b2ae-445c-887b-290171ae5d80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.973740] env[62619]: DEBUG oslo_concurrency.lockutils [req-7137ec81-4faa-456f-8088-2da31db42ced req-b4110265-d90d-49d2-8be9-e09e073d2ad0 service nova] Lock "d7b2d831-b2ae-445c-887b-290171ae5d80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.974203] env[62619]: DEBUG oslo_concurrency.lockutils [req-7137ec81-4faa-456f-8088-2da31db42ced req-b4110265-d90d-49d2-8be9-e09e073d2ad0 service nova] Lock "d7b2d831-b2ae-445c-887b-290171ae5d80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.974244] env[62619]: DEBUG nova.compute.manager [req-7137ec81-4faa-456f-8088-2da31db42ced req-b4110265-d90d-49d2-8be9-e09e073d2ad0 service nova] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] No waiting events found dispatching network-vif-plugged-4bd598be-9cdb-42cd-a9b0-53e9eb3622ec {{(pid=62619) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1381.974747] env[62619]: WARNING nova.compute.manager [req-7137ec81-4faa-456f-8088-2da31db42ced req-b4110265-d90d-49d2-8be9-e09e073d2ad0 service nova] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Received unexpected event network-vif-plugged-4bd598be-9cdb-42cd-a9b0-53e9eb3622ec for instance with vm_state building and task_state spawning. [ 1381.994360] env[62619]: DEBUG nova.compute.manager [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Received event network-vif-plugged-2de36e51-915e-40de-9915-eb1bca9827ad {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1381.994578] env[62619]: DEBUG oslo_concurrency.lockutils [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] Acquiring lock "4ee81568-ad9a-4ded-b6fe-15503d85968e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.996055] env[62619]: DEBUG oslo_concurrency.lockutils [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] Lock "4ee81568-ad9a-4ded-b6fe-15503d85968e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.996193] env[62619]: DEBUG oslo_concurrency.lockutils [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] Lock "4ee81568-ad9a-4ded-b6fe-15503d85968e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.996821] env[62619]: DEBUG nova.compute.manager [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] No waiting events found dispatching network-vif-plugged-2de36e51-915e-40de-9915-eb1bca9827ad {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1381.996821] env[62619]: WARNING nova.compute.manager [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Received unexpected event network-vif-plugged-2de36e51-915e-40de-9915-eb1bca9827ad for instance with vm_state building and task_state spawning. [ 1381.996821] env[62619]: DEBUG nova.compute.manager [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Received event network-changed-2de36e51-915e-40de-9915-eb1bca9827ad {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1381.997018] env[62619]: DEBUG nova.compute.manager [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Refreshing instance network info cache due to event network-changed-2de36e51-915e-40de-9915-eb1bca9827ad. 
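For reference, the inventory payload reported above for provider e814b747-ed75-487b-a97d-acf66bc6db0b translates into schedulable capacity as (total - reserved) * allocation_ratio per resource class, with max_unit bounding any single allocation. A small worked example with the logged numbers:

```python
# Worked example: capacity implied by the inventory reported to Placement
# above, using capacity = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 164},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} schedulable, max {inv['max_unit']} per allocation")

# VCPU: 192 schedulable, MEMORY_MB: 196078 schedulable, DISK_GB: 400 schedulable
```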
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1381.997813] env[62619]: DEBUG oslo_concurrency.lockutils [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] Acquiring lock "refresh_cache-4ee81568-ad9a-4ded-b6fe-15503d85968e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.997813] env[62619]: DEBUG oslo_concurrency.lockutils [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] Acquired lock "refresh_cache-4ee81568-ad9a-4ded-b6fe-15503d85968e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.997813] env[62619]: DEBUG nova.network.neutron [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Refreshing network info cache for port 2de36e51-915e-40de-9915-eb1bca9827ad {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1382.079801] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527d4e6a-09a0-00c7-8645-84f772b0c738, 'name': SearchDatastore_Task, 'duration_secs': 0.011879} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.080220] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.080449] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1382.080721] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.080896] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.081223] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1382.081465] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-37ef082c-c3fa-454b-bc0b-706f0b83936f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.092398] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1382.093757] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1382.094414] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-057616d8-bbdf-4ff3-97ab-3e044c67d3f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.107047] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1382.107047] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52febdb3-3c7d-d7bd-f795-669c07b82143" [ 1382.107047] env[62619]: _type = "Task" [ 1382.107047] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.118597] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52febdb3-3c7d-d7bd-f795-669c07b82143, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.220952] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1382.271488] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Start spawning the instance on the hypervisor. 
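The image-cache handling above (locking the cached VMDK, MakeDirectory, SearchDatastore_Task) revolves around two datastore paths: the shared cached copy of the Glance image and the per-instance root disk copied from it. A trivial sketch of that path layout using the IDs from the log; the helper below is purely illustrative and not a Nova function.

```python
# Illustrative helper showing the datastore path layout the log's
# image-cache handling works with; not a Nova function.
def vmware_paths(datastore, image_id, instance_uuid,
                 cache_folder='devstack-image-cache_base'):
    cached = f'[{datastore}] {cache_folder}/{image_id}/{image_id}.vmdk'
    root = f'[{datastore}] {instance_uuid}/{instance_uuid}.vmdk'
    return cached, root

cached, root = vmware_paths('datastore1',
                            '27a858d5-7985-4b17-8b01-50adcd8f566c',
                            'fb231b38-950e-4c86-bfe5-4c10a304910f')
# cached -> '[datastore1] devstack-image-cache_base/27a858d5-.../27a858d5-....vmdk'
# root   -> '[datastore1] fb231b38-.../fb231b38-....vmdk'
```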
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1382.306220] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1382.306511] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1382.306687] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1382.309018] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1382.309018] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1382.309018] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1382.309018] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1382.309018] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1382.309321] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1382.309321] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1382.309321] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1382.310025] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f748f3-41a6-409d-b081-875574bc031d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.321038] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0324fb-37ca-4d67-be86-24468fcad7ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.375430] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Updating instance_info_cache with network_info: [{"id": "4bd598be-9cdb-42cd-a9b0-53e9eb3622ec", "address": "fa:16:3e:86:8f:9a", "network": {"id": "15849cd3-d13d-439e-a637-b0a924a3fc16", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1468882389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f397d7b5794041daad38e58b63d3e8ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd598be-9c", "ovs_interfaceid": "4bd598be-9cdb-42cd-a9b0-53e9eb3622ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.406742] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Releasing lock "refresh_cache-f46de981-1f04-4baf-874c-de1b95d16f9d" {{(pid=62619) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.407107] env[62619]: DEBUG nova.compute.manager [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Instance network_info: |[{"id": "cfe84dde-a706-4b94-9382-35d5056a97d2", "address": "fa:16:3e:82:5c:55", "network": {"id": "7dbd62d0-3a8a-4d8d-9307-e8a4b3ec0aba", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1616710183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "637829eb9a594ecc9e6103612182ca30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfe84dde-a7", "ovs_interfaceid": "cfe84dde-a706-4b94-9382-35d5056a97d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1382.407788] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:5c:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3117b312-701b-4439-b197-96b6c5cdca89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cfe84dde-a706-4b94-9382-35d5056a97d2', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1382.415875] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Creating folder: Project (637829eb9a594ecc9e6103612182ca30). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1382.416505] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02a84de2-9ce4-4150-9271-b54309ef5686 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.423694] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.182s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.427088] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.498s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.428418] env[62619]: INFO nova.compute.claims [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1382.436218] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Created folder: Project (637829eb9a594ecc9e6103612182ca30) in parent group-v368875. [ 1382.436218] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Creating folder: Instances. Parent ref: group-v368918. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1382.438695] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b07aa76c-4248-46c4-9a34-b3063d45e721 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.441115] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777193, 'name': ReconfigVM_Task, 'duration_secs': 0.745097} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.441448] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Reconfigured VM instance instance-0000000d to attach disk [datastore1] fb231b38-950e-4c86-bfe5-4c10a304910f/fb231b38-950e-4c86-bfe5-4c10a304910f.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1382.442632] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f643ed0-d7b5-4692-9ff5-3ba04a688889 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.451313] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1382.451313] env[62619]: value = "task-1777196" [ 1382.451313] env[62619]: _type = "Task" [ 1382.451313] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.457966] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Created folder: Instances in parent group-v368918. [ 1382.458723] env[62619]: DEBUG oslo.service.loopingcall [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1382.459225] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1382.459570] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d81b46b-949d-4176-9deb-a567f4e7f593 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.482139] env[62619]: INFO nova.scheduler.client.report [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Deleted allocations for instance aa576459-65bf-4b16-ad1d-0930497522eb [ 1382.486812] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777196, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.495153] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1382.495153] env[62619]: value = "task-1777197" [ 1382.495153] env[62619]: _type = "Task" [ 1382.495153] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.505168] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777197, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.596467] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "08c1fef9-40fc-4420-91de-fe911dea70f7" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.596467] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "08c1fef9-40fc-4420-91de-fe911dea70f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.596467] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "08c1fef9-40fc-4420-91de-fe911dea70f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.596467] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "08c1fef9-40fc-4420-91de-fe911dea70f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.596774] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "08c1fef9-40fc-4420-91de-fe911dea70f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.601971] env[62619]: INFO nova.compute.manager [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Terminating instance [ 1382.617554] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52febdb3-3c7d-d7bd-f795-669c07b82143, 'name': SearchDatastore_Task, 'duration_secs': 0.011577} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.618923] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbe35cd1-9965-412d-b989-0bc532eaa18c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.626151] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1382.626151] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bb5dd3-1a0f-9f0a-8655-a06b1a99c671" [ 1382.626151] env[62619]: _type = "Task" [ 1382.626151] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.639090] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bb5dd3-1a0f-9f0a-8655-a06b1a99c671, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.814155] env[62619]: DEBUG nova.network.neutron [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Updated VIF entry in instance network info cache for port 2de36e51-915e-40de-9915-eb1bca9827ad. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1382.814575] env[62619]: DEBUG nova.network.neutron [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Updating instance_info_cache with network_info: [{"id": "2de36e51-915e-40de-9915-eb1bca9827ad", "address": "fa:16:3e:58:8f:8e", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2de36e51-91", "ovs_interfaceid": "2de36e51-915e-40de-9915-eb1bca9827ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.876831] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Releasing lock "refresh_cache-d7b2d831-b2ae-445c-887b-290171ae5d80" {{(pid=62619) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.877199] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Instance network_info: |[{"id": "4bd598be-9cdb-42cd-a9b0-53e9eb3622ec", "address": "fa:16:3e:86:8f:9a", "network": {"id": "15849cd3-d13d-439e-a637-b0a924a3fc16", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1468882389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f397d7b5794041daad38e58b63d3e8ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd598be-9c", "ovs_interfaceid": "4bd598be-9cdb-42cd-a9b0-53e9eb3622ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1382.877653] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:8f:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4bd598be-9cdb-42cd-a9b0-53e9eb3622ec', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1382.891593] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Creating folder: Project (f397d7b5794041daad38e58b63d3e8ea). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1382.892845] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e9ab7bbe-2359-4134-8955-9129a87b8935 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.908688] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Created folder: Project (f397d7b5794041daad38e58b63d3e8ea) in parent group-v368875. [ 1382.908891] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Creating folder: Instances. Parent ref: group-v368921. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1382.909252] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e61c0e9-e045-42a2-9e69-65e39b2cd30a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.920982] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Created folder: Instances in parent group-v368921. [ 1382.920982] env[62619]: DEBUG oslo.service.loopingcall [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1382.920982] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1382.920982] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e723f64-435a-4e2a-bc86-17e96da5944d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.945789] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1382.945789] env[62619]: value = "task-1777200" [ 1382.945789] env[62619]: _type = "Task" [ 1382.945789] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.960794] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777200, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.964842] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777196, 'name': Rename_Task, 'duration_secs': 0.224662} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.964842] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1382.965228] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7a41008-5805-4ce5-a95d-0cd6fe13ac0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.972069] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1382.972069] env[62619]: value = "task-1777201" [ 1382.972069] env[62619]: _type = "Task" [ 1382.972069] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.984213] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777201, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.997218] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eff042e2-c09a-45b9-bb21-644b6175c2b0 tempest-DeleteServersAdminTestJSON-1358370278 tempest-DeleteServersAdminTestJSON-1358370278-project-admin] Lock "aa576459-65bf-4b16-ad1d-0930497522eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 24.143s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.011729] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777197, 'name': CreateVM_Task, 'duration_secs': 0.421389} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.012135] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1383.013165] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.013403] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.013944] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1383.015328] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ef8e793-b089-4692-a6eb-a9dae9f25447 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.023571] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Waiting for the task: (returnval){ [ 1383.023571] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ada86e-fa67-0a74-7235-088daed56a6d" [ 1383.023571] env[62619]: _type = "Task" [ 1383.023571] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.036374] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ada86e-fa67-0a74-7235-088daed56a6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.105987] env[62619]: DEBUG nova.compute.manager [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1383.105987] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1383.106864] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25adbe16-9d23-4c7c-8e32-583a2a87d976 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.115867] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1383.115867] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16440fdf-bdcf-41ca-a0bd-e2f22de2a2ca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.139941] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bb5dd3-1a0f-9f0a-8655-a06b1a99c671, 'name': SearchDatastore_Task, 'duration_secs': 0.012644} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.140622] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.140622] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4ee81568-ad9a-4ded-b6fe-15503d85968e/4ee81568-ad9a-4ded-b6fe-15503d85968e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1383.140925] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e50fcde0-c88c-40b7-8325-a92b940459f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.150771] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1383.150771] env[62619]: value = "task-1777203" [ 1383.150771] env[62619]: _type = "Task" [ 1383.150771] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.160656] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777203, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.198139] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1383.199905] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1383.200191] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleting the datastore file [datastore1] 08c1fef9-40fc-4420-91de-fe911dea70f7 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1383.200488] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb367b13-b216-4dc7-a65b-12475f65f0b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.208703] env[62619]: DEBUG oslo_vmware.api [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1383.208703] env[62619]: value = "task-1777204" [ 1383.208703] env[62619]: _type = "Task" [ 1383.208703] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.217886] env[62619]: DEBUG oslo_vmware.api [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777204, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.318487] env[62619]: DEBUG oslo_concurrency.lockutils [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] Releasing lock "refresh_cache-4ee81568-ad9a-4ded-b6fe-15503d85968e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.318487] env[62619]: DEBUG nova.compute.manager [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Received event network-vif-plugged-cfe84dde-a706-4b94-9382-35d5056a97d2 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1383.318487] env[62619]: DEBUG oslo_concurrency.lockutils [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] Acquiring lock "f46de981-1f04-4baf-874c-de1b95d16f9d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.318487] env[62619]: DEBUG oslo_concurrency.lockutils [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] Lock "f46de981-1f04-4baf-874c-de1b95d16f9d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.318487] env[62619]: DEBUG oslo_concurrency.lockutils [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] Lock "f46de981-1f04-4baf-874c-de1b95d16f9d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.318834] env[62619]: DEBUG nova.compute.manager [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] No waiting events found dispatching network-vif-plugged-cfe84dde-a706-4b94-9382-35d5056a97d2 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1383.318834] env[62619]: WARNING nova.compute.manager [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Received unexpected event network-vif-plugged-cfe84dde-a706-4b94-9382-35d5056a97d2 for instance with vm_state building and task_state spawning. [ 1383.319081] env[62619]: DEBUG nova.compute.manager [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Received event network-changed-cfe84dde-a706-4b94-9382-35d5056a97d2 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1383.319280] env[62619]: DEBUG nova.compute.manager [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Refreshing instance network info cache due to event network-changed-cfe84dde-a706-4b94-9382-35d5056a97d2. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1383.319508] env[62619]: DEBUG oslo_concurrency.lockutils [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] Acquiring lock "refresh_cache-f46de981-1f04-4baf-874c-de1b95d16f9d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.319703] env[62619]: DEBUG oslo_concurrency.lockutils [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] Acquired lock "refresh_cache-f46de981-1f04-4baf-874c-de1b95d16f9d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.319912] env[62619]: DEBUG nova.network.neutron [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Refreshing network info cache for port cfe84dde-a706-4b94-9382-35d5056a97d2 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1383.466653] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777200, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.493255] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777201, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.536471] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ada86e-fa67-0a74-7235-088daed56a6d, 'name': SearchDatastore_Task, 'duration_secs': 0.01389} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.536968] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.538331] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1383.538331] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.538331] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.538331] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1383.538536] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3b9e725-a750-4a73-948f-a218e57bf38c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.565669] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1383.566583] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1383.569884] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f6116fd-7fa3-4998-9458-4bf8a9ef5c01 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.578905] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Waiting for the task: (returnval){ [ 1383.578905] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5207e3fa-54e8-f99d-bdd2-4329c9c48660" [ 1383.578905] env[62619]: _type = "Task" [ 1383.578905] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.593960] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5207e3fa-54e8-f99d-bdd2-4329c9c48660, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.668985] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777203, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.723937] env[62619]: DEBUG oslo_vmware.api [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777204, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.470737} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.727326] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1383.727530] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1383.727697] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1383.727867] env[62619]: INFO nova.compute.manager [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1383.728643] env[62619]: DEBUG oslo.service.loopingcall [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1383.728730] env[62619]: DEBUG nova.compute.manager [-] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1383.728774] env[62619]: DEBUG nova.network.neutron [-] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1383.973613] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777200, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.998139] env[62619]: DEBUG oslo_vmware.api [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777201, 'name': PowerOnVM_Task, 'duration_secs': 0.834416} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.005098] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1384.005098] env[62619]: INFO nova.compute.manager [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Took 12.39 seconds to spawn the instance on the hypervisor. 
[ 1384.005309] env[62619]: DEBUG nova.compute.manager [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1384.006693] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2183d743-3f79-4dac-8145-cb242fc6b21d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.081249] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a208967-46df-482e-a3de-e6a70584983f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.096507] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ee3d23-13c8-4170-969d-4e696dd05c76 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.099869] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5207e3fa-54e8-f99d-bdd2-4329c9c48660, 'name': SearchDatastore_Task, 'duration_secs': 0.05168} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.101928] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6129d5f-d870-4b61-af6f-1151a37aa542 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.135690] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1636f2b3-ff2f-4c5a-bbd3-8b6da7530483 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.141088] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Waiting for the task: (returnval){ [ 1384.141088] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520842b0-e0a5-b589-bfa6-3f947ed80e63" [ 1384.141088] env[62619]: _type = "Task" [ 1384.141088] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.149295] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a390a6-7982-4a62-87bf-66905e855e60 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.158252] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520842b0-e0a5-b589-bfa6-3f947ed80e63, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.165675] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777203, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537746} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.175965] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4ee81568-ad9a-4ded-b6fe-15503d85968e/4ee81568-ad9a-4ded-b6fe-15503d85968e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1384.176244] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1384.176731] env[62619]: DEBUG nova.compute.provider_tree [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1384.179684] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-18029d5b-16db-41d9-b787-75607727e5eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.191312] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1384.191312] env[62619]: value = "task-1777205" [ 1384.191312] env[62619]: _type = "Task" [ 1384.191312] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.202241] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777205, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.380156] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "2a41be15-efaf-4e78-a278-2711cb11e98f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.380156] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "2a41be15-efaf-4e78-a278-2711cb11e98f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.420195] env[62619]: DEBUG nova.network.neutron [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Updated VIF entry in instance network info cache for port cfe84dde-a706-4b94-9382-35d5056a97d2. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1384.420563] env[62619]: DEBUG nova.network.neutron [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Updating instance_info_cache with network_info: [{"id": "cfe84dde-a706-4b94-9382-35d5056a97d2", "address": "fa:16:3e:82:5c:55", "network": {"id": "7dbd62d0-3a8a-4d8d-9307-e8a4b3ec0aba", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1616710183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "637829eb9a594ecc9e6103612182ca30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfe84dde-a7", "ovs_interfaceid": "cfe84dde-a706-4b94-9382-35d5056a97d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.461049] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777200, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.486784] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Acquiring lock "d4230edc-cfda-4b9f-ab42-2f39c699ff03" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.487053] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Lock "d4230edc-cfda-4b9f-ab42-2f39c699ff03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.533095] env[62619]: INFO nova.compute.manager [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Took 30.63 seconds to build instance. [ 1384.615285] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Successfully updated port: 789bc80f-8a7f-49a0-8500-7c2ea007446c {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1384.672131] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520842b0-e0a5-b589-bfa6-3f947ed80e63, 'name': SearchDatastore_Task, 'duration_secs': 0.014975} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.688707] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.688707] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] f46de981-1f04-4baf-874c-de1b95d16f9d/f46de981-1f04-4baf-874c-de1b95d16f9d.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1384.688707] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-408567d8-8442-472c-b8f9-f6f193e0ef05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.688707] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Waiting for the task: (returnval){ [ 1384.688707] env[62619]: value = "task-1777206" [ 1384.688707] env[62619]: _type = "Task" [ 1384.688707] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.689372] env[62619]: DEBUG nova.scheduler.client.report [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1384.700822] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777206, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.705691] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777205, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099482} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.705995] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1384.706820] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89eafc94-6999-48dd-836f-e71721daee4a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.713922] env[62619]: DEBUG nova.network.neutron [-] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.739902] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 4ee81568-ad9a-4ded-b6fe-15503d85968e/4ee81568-ad9a-4ded-b6fe-15503d85968e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1384.741770] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd3d8601-1154-41a7-9499-682fb9011a5c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.767022] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1384.767022] env[62619]: value = "task-1777207" [ 1384.767022] env[62619]: _type = "Task" [ 1384.767022] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.777521] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777207, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.923517] env[62619]: DEBUG oslo_concurrency.lockutils [req-222fffc0-28f4-4f75-8fb9-cc2f688692d1 req-2a5099b4-58e3-4186-804e-6461e95d26a5 service nova] Releasing lock "refresh_cache-f46de981-1f04-4baf-874c-de1b95d16f9d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.961936] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777200, 'name': CreateVM_Task} progress is 99%. 
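[editor's note] The recurring "Task: {...} progress is N%" and "completed successfully" entries come from oslo.vmware's task poller (wait_for_task / _poll_task in api.py, referenced above). A rough sketch of the calling pattern behind the CopyVirtualDisk_Task entries; `session` is assumed to be an existing oslo_vmware.api.VMwareAPISession, and the helper name and argument plumbing are illustrative, not Nova's vm_util code:

    def copy_disk(session, dc_ref, src, dst):
        # Start a long-running vCenter task; the SOAP method and keyword
        # arguments follow the vSphere CopyVirtualDisk_Task API.
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                  session.vim.service_content.virtualDiskManager,
                                  sourceName=src, sourceDatacenter=dc_ref,
                                  destName=dst, destDatacenter=dc_ref)
        # wait_for_task() polls the task every task_poll_interval seconds and
        # produces the "progress is N%" / "completed successfully" DEBUG lines.
        return session.wait_for_task(task)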
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.034806] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf859b20-1117-451b-899c-8558ac145640 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "fb231b38-950e-4c86-bfe5-4c10a304910f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.347s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.118412] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "refresh_cache-312aed5b-a66e-4428-ac1b-483dc2b38291" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.118562] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquired lock "refresh_cache-312aed5b-a66e-4428-ac1b-483dc2b38291" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.118656] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1385.189929] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.763s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.190191] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1385.197431] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.441s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.197707] env[62619]: DEBUG nova.objects.instance [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Lazy-loading 'resources' on Instance uuid 4f08d36b-f26e-499e-a4be-d8cbb481a44d {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1385.199555] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777206, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.241436] env[62619]: INFO nova.compute.manager [-] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Took 1.51 seconds to deallocate network for instance. [ 1385.276977] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777207, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.400530] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquiring lock "ef41dd29-1270-4071-9e89-20132131de2d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.400883] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Lock "ef41dd29-1270-4071-9e89-20132131de2d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.464402] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777200, 'name': CreateVM_Task, 'duration_secs': 2.441039} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.464587] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1385.465318] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.465486] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.465815] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1385.466455] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21b93779-ba73-4b63-8398-df79222acb45 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.473254] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1385.473254] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52852242-a1c8-3282-b5b2-fb012680c051" [ 1385.473254] env[62619]: _type = "Task" [ 1385.473254] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.485291] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52852242-a1c8-3282-b5b2-fb012680c051, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.537937] env[62619]: DEBUG nova.compute.manager [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1385.595460] env[62619]: DEBUG nova.compute.manager [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Received event network-changed-3af5cb8d-ea8b-4677-920d-5e06ecc2843b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1385.595602] env[62619]: DEBUG nova.compute.manager [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Refreshing instance network info cache due to event network-changed-3af5cb8d-ea8b-4677-920d-5e06ecc2843b. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1385.596701] env[62619]: DEBUG oslo_concurrency.lockutils [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] Acquiring lock "refresh_cache-ed34ae20-a891-45aa-8124-f36f264937f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.596701] env[62619]: DEBUG oslo_concurrency.lockutils [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] Acquired lock "refresh_cache-ed34ae20-a891-45aa-8124-f36f264937f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.596701] env[62619]: DEBUG nova.network.neutron [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Refreshing network info cache for port 3af5cb8d-ea8b-4677-920d-5e06ecc2843b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1385.679807] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1385.696380] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777206, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.707377] env[62619]: DEBUG nova.compute.utils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1385.714277] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1385.714277] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1385.732665] env[62619]: DEBUG nova.compute.manager [req-310796ce-33ac-4729-81d2-9c7133628eb0 req-7033cb30-5afe-4176-b2be-330549d07060 service nova] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Received event network-vif-plugged-789bc80f-8a7f-49a0-8500-7c2ea007446c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1385.732884] env[62619]: DEBUG oslo_concurrency.lockutils [req-310796ce-33ac-4729-81d2-9c7133628eb0 req-7033cb30-5afe-4176-b2be-330549d07060 service nova] Acquiring lock "312aed5b-a66e-4428-ac1b-483dc2b38291-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.733116] env[62619]: DEBUG oslo_concurrency.lockutils [req-310796ce-33ac-4729-81d2-9c7133628eb0 req-7033cb30-5afe-4176-b2be-330549d07060 service nova] Lock "312aed5b-a66e-4428-ac1b-483dc2b38291-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.733288] env[62619]: DEBUG oslo_concurrency.lockutils [req-310796ce-33ac-4729-81d2-9c7133628eb0 req-7033cb30-5afe-4176-b2be-330549d07060 service nova] Lock "312aed5b-a66e-4428-ac1b-483dc2b38291-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.733456] env[62619]: DEBUG nova.compute.manager [req-310796ce-33ac-4729-81d2-9c7133628eb0 req-7033cb30-5afe-4176-b2be-330549d07060 service nova] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] No waiting events found dispatching network-vif-plugged-789bc80f-8a7f-49a0-8500-7c2ea007446c {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1385.733618] env[62619]: WARNING nova.compute.manager [req-310796ce-33ac-4729-81d2-9c7133628eb0 req-7033cb30-5afe-4176-b2be-330549d07060 service nova] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Received unexpected event network-vif-plugged-789bc80f-8a7f-49a0-8500-7c2ea007446c for instance with vm_state building and task_state spawning. [ 1385.749352] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.779447] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777207, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.836313] env[62619]: DEBUG nova.policy [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f2bd9597b434c1d8f861968794a9070', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f397d7b5794041daad38e58b63d3e8ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1385.989898] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52852242-a1c8-3282-b5b2-fb012680c051, 'name': SearchDatastore_Task, 'duration_secs': 0.05397} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.992545] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.992807] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1385.993044] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.993190] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.993654] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1385.993995] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-eccb5bfd-a0cf-446b-9585-b4271f69dc8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.006017] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1386.006017] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1386.006017] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1949fc16-5668-4bdc-8b36-730fa0bf947b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.018165] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1386.018165] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529c55c6-46e7-7e51-fdfe-68aaea9f4626" [ 1386.018165] env[62619]: _type = "Task" [ 1386.018165] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.031026] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529c55c6-46e7-7e51-fdfe-68aaea9f4626, 'name': SearchDatastore_Task, 'duration_secs': 0.009812} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.032572] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75b9c882-aafb-45f4-9a80-7e5e36292639 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.042569] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1386.042569] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52961119-306b-6878-ee67-17f0fa3aa39b" [ 1386.042569] env[62619]: _type = "Task" [ 1386.042569] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.058709] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52961119-306b-6878-ee67-17f0fa3aa39b, 'name': SearchDatastore_Task, 'duration_secs': 0.009748} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.058796] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.059034] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] d7b2d831-b2ae-445c-887b-290171ae5d80/d7b2d831-b2ae-445c-887b-290171ae5d80.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1386.059991] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a99f48b-a237-41ae-9bb4-62e7f0b663e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.068960] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1386.068960] env[62619]: value = "task-1777208" [ 1386.068960] env[62619]: _type = "Task" [ 1386.068960] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.073331] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.078800] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777208, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.082414] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Updating instance_info_cache with network_info: [{"id": "789bc80f-8a7f-49a0-8500-7c2ea007446c", "address": "fa:16:3e:43:30:a8", "network": {"id": "15849cd3-d13d-439e-a637-b0a924a3fc16", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1468882389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f397d7b5794041daad38e58b63d3e8ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789bc80f-8a", "ovs_interfaceid": "789bc80f-8a7f-49a0-8500-7c2ea007446c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.191727] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777206, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.05321} completed successfully. 
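[editor's note] The network_info blob recorded above for instance 312aed5b-… is a list of VIF dictionaries. A small illustrative helper (name and shape assumptions only, not Nova code) showing how a fixed IPv4 address can be pulled out of that structure:

    def first_fixed_ip(network_info):
        """Return the first fixed IP in a Nova-style network_info list.

        `network_info` is assumed to have the shape seen in the log above:
        [{"network": {"subnets": [{"ips": [{"address": ..., "type": "fixed"}]}]}}]
        """
        for vif in network_info:
            for subnet in vif.get("network", {}).get("subnets", []):
                for ip in subnet.get("ips", []):
                    if ip.get("type") == "fixed":
                        return ip["address"]
        return None

    # For the VIF logged above this returns "192.168.128.11".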
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.194757] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] f46de981-1f04-4baf-874c-de1b95d16f9d/f46de981-1f04-4baf-874c-de1b95d16f9d.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1386.194757] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1386.194757] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7f6aafc0-322b-4131-96fd-edd73ee6d10d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.203429] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Waiting for the task: (returnval){ [ 1386.203429] env[62619]: value = "task-1777209" [ 1386.203429] env[62619]: _type = "Task" [ 1386.203429] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.215898] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777209, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.222195] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1386.232774] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad7a7c5-28e1-4b0b-b375-9fa61e181d54 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.245267] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7fe2d1-77bd-4071-8f6b-482072fd501f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.286054] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9434b5-5bfd-43c0-8a77-6b773c3ecc38 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.295958] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777207, 'name': ReconfigVM_Task, 'duration_secs': 1.091458} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.296497] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 4ee81568-ad9a-4ded-b6fe-15503d85968e/4ee81568-ad9a-4ded-b6fe-15503d85968e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1386.298082] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5f7e88-e507-4be0-a10f-e305bf456ab7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.302043] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a671658d-8ee9-4af4-9a4f-245b45d1b7d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.315593] env[62619]: DEBUG nova.compute.provider_tree [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1386.318464] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1386.318464] env[62619]: value = "task-1777210" [ 1386.318464] env[62619]: _type = "Task" [ 1386.318464] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.329996] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777210, 'name': Rename_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.588047] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Releasing lock "refresh_cache-312aed5b-a66e-4428-ac1b-483dc2b38291" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.588047] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Instance network_info: |[{"id": "789bc80f-8a7f-49a0-8500-7c2ea007446c", "address": "fa:16:3e:43:30:a8", "network": {"id": "15849cd3-d13d-439e-a637-b0a924a3fc16", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1468882389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f397d7b5794041daad38e58b63d3e8ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789bc80f-8a", "ovs_interfaceid": "789bc80f-8a7f-49a0-8500-7c2ea007446c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1386.588533] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777208, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.588533] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:30:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '789bc80f-8a7f-49a0-8500-7c2ea007446c', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1386.595740] env[62619]: DEBUG oslo.service.loopingcall [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1386.598015] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1386.598298] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b71944e3-374e-4d74-9266-13d9a075b0c4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.622473] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1386.622473] env[62619]: value = "task-1777211" [ 1386.622473] env[62619]: _type = "Task" [ 1386.622473] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.636856] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777211, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.714078] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777209, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075062} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.714373] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1386.715217] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fc677a-8a46-4b6e-9e59-1fb833352bd0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.743157] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] f46de981-1f04-4baf-874c-de1b95d16f9d/f46de981-1f04-4baf-874c-de1b95d16f9d.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1386.743777] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7f34bfe-fd80-4a2b-99aa-6580f9d5c2dd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.766341] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Waiting for the task: (returnval){ [ 1386.766341] env[62619]: value = "task-1777212" [ 1386.766341] env[62619]: _type = "Task" [ 1386.766341] env[62619]: } to complete. 
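[editor's note] The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return." line points at oslo.service's looping-call machinery (loopingcall.py, referenced above). As a hedged illustration of that machinery, not the exact wrapper Nova uses here, a FixedIntervalLoopingCall can poll until a callable signals completion:

    from oslo_service import loopingcall

    def wait_until(check, interval=0.5):
        """Poll `check()` every `interval` seconds until it returns a truthy value."""
        def _poll():
            result = check()
            if result:
                # Raising LoopingCallDone stops the loop and hands the value
                # back to the .wait() caller below.
                raise loopingcall.LoopingCallDone(retvalue=result)

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=interval).wait()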
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.778498] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777212, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.820693] env[62619]: DEBUG nova.scheduler.client.report [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1386.835044] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777210, 'name': Rename_Task, 'duration_secs': 0.421202} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.835347] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1386.835612] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-526528fb-6f35-4c12-a94b-98d47529a139 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.844536] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1386.844536] env[62619]: value = "task-1777213" [ 1386.844536] env[62619]: _type = "Task" [ 1386.844536] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.860173] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777213, 'name': PowerOnVM_Task} progress is 0%. 
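[editor's note] The inventory reported above for provider e814b747-… (VCPU total=48 with allocation_ratio=4.0, MEMORY_MB total=196590 with reserved=512, DISK_GB total=400) sets the capacity Placement allocates against; effective capacity is (total - reserved) * allocation_ratio. A quick arithmetic check using the log's own figures:

    # capacity = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0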
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.000423] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Successfully created port: b4a31a23-98c4-445a-8fe4-36fc6013e543 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1387.081665] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777208, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.562384} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.082229] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] d7b2d831-b2ae-445c-887b-290171ae5d80/d7b2d831-b2ae-445c-887b-290171ae5d80.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1387.084353] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1387.084353] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4bda4559-9bcc-4d22-8d3a-ac820fbe989d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.090965] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1387.090965] env[62619]: value = "task-1777214" [ 1387.090965] env[62619]: _type = "Task" [ 1387.090965] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.102479] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777214, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.116839] env[62619]: DEBUG nova.network.neutron [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Updated VIF entry in instance network info cache for port 3af5cb8d-ea8b-4677-920d-5e06ecc2843b. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1387.117259] env[62619]: DEBUG nova.network.neutron [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Updating instance_info_cache with network_info: [{"id": "3af5cb8d-ea8b-4677-920d-5e06ecc2843b", "address": "fa:16:3e:e3:16:0b", "network": {"id": "73bdc485-118f-4877-afde-edd7e6119b94", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1994939706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9a96018b71f4e6db335ff7deeb6c4b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3af5cb8d-ea", "ovs_interfaceid": "3af5cb8d-ea8b-4677-920d-5e06ecc2843b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.142728] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777211, 'name': CreateVM_Task, 'duration_secs': 0.451445} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.142728] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1387.142728] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.142728] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.143141] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1387.143339] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d89ebd4e-88ea-4ab6-bdfe-9d4c16124fa4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.153220] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1387.153220] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52990be6-cc60-8b57-77bb-bbdfe25ca45f" [ 1387.153220] env[62619]: _type = "Task" [ 1387.153220] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.164641] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52990be6-cc60-8b57-77bb-bbdfe25ca45f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.245535] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1387.284281] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777212, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.295392] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1387.295708] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1387.296129] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1387.296129] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1387.296699] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1387.296699] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1387.296699] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1387.296848] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1387.297075] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1387.297159] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1387.298115] env[62619]: DEBUG nova.virt.hardware [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1387.298584] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a5b5a4-4b81-4354-b33c-93c394d37ca9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.307638] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8458ec9-0798-4278-81ff-19438e80e007 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.329664] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.132s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.332037] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.160s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.336525] env[62619]: INFO nova.compute.claims [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1387.356276] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777213, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.362127] env[62619]: INFO nova.scheduler.client.report [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Deleted allocations for instance 4f08d36b-f26e-499e-a4be-d8cbb481a44d [ 1387.604941] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777214, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071943} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.605381] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1387.606311] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487db8fa-747e-41b1-a50d-c2f442a5cdc7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.629034] env[62619]: DEBUG oslo_concurrency.lockutils [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] Releasing lock "refresh_cache-ed34ae20-a891-45aa-8124-f36f264937f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.629034] env[62619]: DEBUG nova.compute.manager [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Received event network-changed-4bd598be-9cdb-42cd-a9b0-53e9eb3622ec {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1387.629034] env[62619]: DEBUG nova.compute.manager [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Refreshing instance network info cache due to event network-changed-4bd598be-9cdb-42cd-a9b0-53e9eb3622ec. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1387.629034] env[62619]: DEBUG oslo_concurrency.lockutils [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] Acquiring lock "refresh_cache-d7b2d831-b2ae-445c-887b-290171ae5d80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.629034] env[62619]: DEBUG oslo_concurrency.lockutils [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] Acquired lock "refresh_cache-d7b2d831-b2ae-445c-887b-290171ae5d80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.629284] env[62619]: DEBUG nova.network.neutron [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Refreshing network info cache for port 4bd598be-9cdb-42cd-a9b0-53e9eb3622ec {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1387.647554] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] d7b2d831-b2ae-445c-887b-290171ae5d80/d7b2d831-b2ae-445c-887b-290171ae5d80.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1387.650894] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-726fae88-04c0-474e-91c5-deb30db75341 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.684095] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52990be6-cc60-8b57-77bb-bbdfe25ca45f, 'name': SearchDatastore_Task, 'duration_secs': 0.01446} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.685494] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.685494] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1387.685884] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.685884] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.686127] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1387.686365] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1387.686365] env[62619]: value = "task-1777215" [ 1387.686365] env[62619]: _type = "Task" [ 1387.686365] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.686594] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b618375-d967-44a2-823a-a96aba7ceaa4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.698908] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777215, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.701983] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1387.702135] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1387.703022] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b985cf01-b9e3-43e0-a936-e8abed7eb208 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.711719] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1387.711719] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52483336-b0f9-a767-f3ba-5c95e76d542a" [ 1387.711719] env[62619]: _type = "Task" [ 1387.711719] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.721818] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52483336-b0f9-a767-f3ba-5c95e76d542a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.776552] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777212, 'name': ReconfigVM_Task, 'duration_secs': 0.52473} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.779208] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Reconfigured VM instance instance-0000000f to attach disk [datastore1] f46de981-1f04-4baf-874c-de1b95d16f9d/f46de981-1f04-4baf-874c-de1b95d16f9d.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1387.779866] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ed9f14b0-104d-427d-b9a1-9dcf65535e05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.789573] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Waiting for the task: (returnval){ [ 1387.789573] env[62619]: value = "task-1777216" [ 1387.789573] env[62619]: _type = "Task" [ 1387.789573] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.800802] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777216, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.857971] env[62619]: DEBUG oslo_vmware.api [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777213, 'name': PowerOnVM_Task, 'duration_secs': 0.639283} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.858309] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1387.858545] env[62619]: INFO nova.compute.manager [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Took 13.58 seconds to spawn the instance on the hypervisor. 
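The nova.virt.hardware records above show the topology search for the m1.nano flavor: flavor and image limits and preferences are all 0:0:0, the maximums therefore default to 65536 sockets/cores/threads, and for a single vCPU the only combination whose product equals the vCPU count is sockets=1, cores=1, threads=1. A simplified sketch of that enumeration under exactly those assumptions (Nova's real _get_possible_cpu_topologies in nova/virt/hardware.py applies additional constraints; this is only an illustration of the search the log reports):

    from collections import namedtuple
    from itertools import product

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) combinations whose product equals vcpus."""
        found = []
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                found.append(VirtCPUTopology(s, c, t))
        return found

    # For 1 vCPU this yields the single topology the log prints:
    print(possible_topologies(1))   # [VirtCPUTopology(sockets=1, cores=1, threads=1)]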
[ 1387.858711] env[62619]: DEBUG nova.compute.manager [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1387.859894] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b361ac1-056c-4528-84c1-01da61e8c077 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.874318] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3615966-f5e3-4d5d-b1b6-bd97f94672fc tempest-ServerDiagnosticsTest-1743271589 tempest-ServerDiagnosticsTest-1743271589-project-member] Lock "4f08d36b-f26e-499e-a4be-d8cbb481a44d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.880s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.203693] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777215, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.225721] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52483336-b0f9-a767-f3ba-5c95e76d542a, 'name': SearchDatastore_Task, 'duration_secs': 0.018039} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.228996] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f318f174-f6f2-4914-a1ea-4533f959baae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.236406] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1388.236406] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ef4fe6-0445-b1c4-3ea0-b416f013bfd6" [ 1388.236406] env[62619]: _type = "Task" [ 1388.236406] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.248111] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ef4fe6-0445-b1c4-3ea0-b416f013bfd6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.304975] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777216, 'name': Rename_Task, 'duration_secs': 0.476271} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.304975] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1388.304975] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-004f5e7d-9d0d-43a1-b466-53739457567c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.313133] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Waiting for the task: (returnval){ [ 1388.313133] env[62619]: value = "task-1777217" [ 1388.313133] env[62619]: _type = "Task" [ 1388.313133] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.323891] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777217, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.341521] env[62619]: DEBUG nova.network.neutron [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Updated VIF entry in instance network info cache for port 4bd598be-9cdb-42cd-a9b0-53e9eb3622ec. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1388.341521] env[62619]: DEBUG nova.network.neutron [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Updating instance_info_cache with network_info: [{"id": "4bd598be-9cdb-42cd-a9b0-53e9eb3622ec", "address": "fa:16:3e:86:8f:9a", "network": {"id": "15849cd3-d13d-439e-a637-b0a924a3fc16", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1468882389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f397d7b5794041daad38e58b63d3e8ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd598be-9c", "ovs_interfaceid": "4bd598be-9cdb-42cd-a9b0-53e9eb3622ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.383071] env[62619]: INFO nova.compute.manager [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Took 31.66 seconds to build instance. [ 1388.708862] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777215, 'name': ReconfigVM_Task, 'duration_secs': 0.811896} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.708862] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Reconfigured VM instance instance-00000010 to attach disk [datastore1] d7b2d831-b2ae-445c-887b-290171ae5d80/d7b2d831-b2ae-445c-887b-290171ae5d80.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1388.708862] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d94221e-8d4d-43de-8ade-2f72d73252d8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.716570] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1388.716570] env[62619]: value = "task-1777218" [ 1388.716570] env[62619]: _type = "Task" [ 1388.716570] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.725867] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777218, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.754709] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ef4fe6-0445-b1c4-3ea0-b416f013bfd6, 'name': SearchDatastore_Task, 'duration_secs': 0.02553} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.754709] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.754709] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 312aed5b-a66e-4428-ac1b-483dc2b38291/312aed5b-a66e-4428-ac1b-483dc2b38291.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1388.754709] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43d3bd21-60e7-46ca-b67f-2f107904f09f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.763950] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1388.763950] env[62619]: value = "task-1777219" [ 1388.763950] env[62619]: _type = "Task" [ 1388.763950] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.772920] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777219, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.839022] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777217, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.849848] env[62619]: DEBUG oslo_concurrency.lockutils [req-54514c5f-18d5-4223-9de2-831bfe7833fd req-bd83b322-a7c8-49a7-b315-360ef424479f service nova] Releasing lock "refresh_cache-d7b2d831-b2ae-445c-887b-290171ae5d80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.886459] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57007abe-c28c-4d5a-b2ea-34081d426442 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "4ee81568-ad9a-4ded-b6fe-15503d85968e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.452s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.975511] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d420f5c-c8a9-4cac-a44b-5da1bd20611c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.986114] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd035b53-4b2a-4a04-a00b-d02be8738001 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.025381] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4953d8-b935-42a9-81ad-c42668866e64 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.037511] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7501e2a1-d652-47d2-a017-e33575142ee9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.054185] env[62619]: DEBUG nova.compute.provider_tree [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1389.227656] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777218, 'name': Rename_Task, 'duration_secs': 0.194035} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.228526] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1389.228526] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ab7cd1a-33d2-4312-afe8-d9a58f4874bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.238028] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1389.238028] env[62619]: value = "task-1777220" [ 1389.238028] env[62619]: _type = "Task" [ 1389.238028] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.244499] env[62619]: DEBUG nova.compute.manager [req-85d78276-02f0-410c-a065-93de20cec3c8 req-1b71c3c8-9201-439a-97d8-24e1024602e9 service nova] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Received event network-vif-deleted-a5a1b25b-7dc6-4b53-90e5-f43a90048197 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1389.248476] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777220, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.276629] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777219, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.299533] env[62619]: DEBUG nova.compute.manager [None req-3715e4d0-2aee-4345-96bc-4b823a35ac9a tempest-ServerDiagnosticsV248Test-645052477 tempest-ServerDiagnosticsV248Test-645052477-project-admin] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1389.300852] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce39d8c9-f2b5-4a2c-bc5e-29112600d8a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.310345] env[62619]: INFO nova.compute.manager [None req-3715e4d0-2aee-4345-96bc-4b823a35ac9a tempest-ServerDiagnosticsV248Test-645052477 tempest-ServerDiagnosticsV248Test-645052477-project-admin] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Retrieving diagnostics [ 1389.311661] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f545f7-b4d4-43c0-aedb-00054576292e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.320589] env[62619]: DEBUG nova.compute.manager [req-b633d115-39d6-4cbb-89e9-04dae734325c req-cc11b55e-1d5b-4121-b94e-ebfca0b66b3b service nova] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Received event network-changed-789bc80f-8a7f-49a0-8500-7c2ea007446c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1389.320923] env[62619]: DEBUG nova.compute.manager [req-b633d115-39d6-4cbb-89e9-04dae734325c req-cc11b55e-1d5b-4121-b94e-ebfca0b66b3b service nova] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Refreshing instance network info cache due to event network-changed-789bc80f-8a7f-49a0-8500-7c2ea007446c. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1389.321052] env[62619]: DEBUG oslo_concurrency.lockutils [req-b633d115-39d6-4cbb-89e9-04dae734325c req-cc11b55e-1d5b-4121-b94e-ebfca0b66b3b service nova] Acquiring lock "refresh_cache-312aed5b-a66e-4428-ac1b-483dc2b38291" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.321385] env[62619]: DEBUG oslo_concurrency.lockutils [req-b633d115-39d6-4cbb-89e9-04dae734325c req-cc11b55e-1d5b-4121-b94e-ebfca0b66b3b service nova] Acquired lock "refresh_cache-312aed5b-a66e-4428-ac1b-483dc2b38291" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.321385] env[62619]: DEBUG nova.network.neutron [req-b633d115-39d6-4cbb-89e9-04dae734325c req-cc11b55e-1d5b-4121-b94e-ebfca0b66b3b service nova] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Refreshing network info cache for port 789bc80f-8a7f-49a0-8500-7c2ea007446c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1389.361477] env[62619]: DEBUG oslo_vmware.api [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777217, 'name': PowerOnVM_Task, 'duration_secs': 0.681931} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.362300] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1389.362555] env[62619]: INFO nova.compute.manager [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Took 12.39 seconds to spawn the instance on the hypervisor. [ 1389.362808] env[62619]: DEBUG nova.compute.manager [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1389.364068] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe83815c-06e9-4152-9bd4-6fd7b7b9dee7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.393469] env[62619]: DEBUG nova.compute.manager [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1389.561794] env[62619]: DEBUG nova.scheduler.client.report [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1389.747075] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777220, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.776527] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777219, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563006} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.776527] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 312aed5b-a66e-4428-ac1b-483dc2b38291/312aed5b-a66e-4428-ac1b-483dc2b38291.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1389.776527] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1389.776725] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad9afd7f-16c3-4e8a-9a58-4939e1b7d9fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.785229] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1389.785229] env[62619]: value = "task-1777221" [ 1389.785229] env[62619]: _type = "Task" [ 1389.785229] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.798294] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777221, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.843508] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Successfully updated port: b4a31a23-98c4-445a-8fe4-36fc6013e543 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1389.891122] env[62619]: INFO nova.compute.manager [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Took 31.27 seconds to build instance. [ 1389.926152] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.994837] env[62619]: DEBUG nova.network.neutron [req-b633d115-39d6-4cbb-89e9-04dae734325c req-cc11b55e-1d5b-4121-b94e-ebfca0b66b3b service nova] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Updated VIF entry in instance network info cache for port 789bc80f-8a7f-49a0-8500-7c2ea007446c. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1389.995212] env[62619]: DEBUG nova.network.neutron [req-b633d115-39d6-4cbb-89e9-04dae734325c req-cc11b55e-1d5b-4121-b94e-ebfca0b66b3b service nova] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Updating instance_info_cache with network_info: [{"id": "789bc80f-8a7f-49a0-8500-7c2ea007446c", "address": "fa:16:3e:43:30:a8", "network": {"id": "15849cd3-d13d-439e-a637-b0a924a3fc16", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1468882389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f397d7b5794041daad38e58b63d3e8ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789bc80f-8a", "ovs_interfaceid": "789bc80f-8a7f-49a0-8500-7c2ea007446c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.074178] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.742s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.074785] env[62619]: DEBUG nova.compute.manager [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1390.077326] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.410s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.077458] env[62619]: DEBUG nova.objects.instance [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Lazy-loading 'resources' on Instance uuid e98120b4-7916-4ce4-88ef-0c904852bb1f {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1390.249527] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777220, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.295909] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777221, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069171} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.296202] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1390.297190] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0802a476-cac9-4839-b46f-57d5396474dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.322254] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 312aed5b-a66e-4428-ac1b-483dc2b38291/312aed5b-a66e-4428-ac1b-483dc2b38291.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1390.322598] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2149bf2e-e356-4081-8df8-c5f3424a2c05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.342741] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1390.342741] env[62619]: value = "task-1777222" [ 1390.342741] env[62619]: _type = "Task" [ 1390.342741] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.345919] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "refresh_cache-e9ca5148-f188-4a15-83ae-8f3d730b0dab" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.346077] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquired lock "refresh_cache-e9ca5148-f188-4a15-83ae-8f3d730b0dab" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.346215] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1390.352550] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777222, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.393698] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3badf9b6-b2f4-43d5-a7bf-486c7f27c781 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Lock "f46de981-1f04-4baf-874c-de1b95d16f9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.796s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.501051] env[62619]: DEBUG oslo_concurrency.lockutils [req-b633d115-39d6-4cbb-89e9-04dae734325c req-cc11b55e-1d5b-4121-b94e-ebfca0b66b3b service nova] Releasing lock "refresh_cache-312aed5b-a66e-4428-ac1b-483dc2b38291" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.585802] env[62619]: DEBUG nova.compute.utils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1390.588125] env[62619]: DEBUG nova.compute.manager [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1390.588521] env[62619]: DEBUG nova.network.neutron [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1390.659584] env[62619]: DEBUG nova.policy [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a369e10f86e14ee3b4201cf175ed03b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22106ed527594810885b6891b382c3ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1390.710378] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.710378] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1390.751751] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777220, 'name': PowerOnVM_Task, 'duration_secs': 1.044875} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.754617] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1390.754826] env[62619]: INFO nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Took 11.14 seconds to spawn the instance on the hypervisor. 
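The oslo_concurrency.lockutils entries in this section report two durations per lock: how long the caller waited before acquiring it ("acquired ... :: waited 19.410s") and how long it was held before release (":: held 2.742s"). A simplified illustration of how such figures can be produced around a plain threading lock follows; it sketches the timing semantics only and is not oslo.concurrency's implementation (the timed_lock helper and its log wording are invented for the example, modeled on the entries above):

    import contextlib
    import threading
    import time

    _locks = {}                      # name -> threading.Lock, created on first use
    _registry_guard = threading.Lock()

    @contextlib.contextmanager
    def timed_lock(name, owner):
        """Acquire a named lock and report waited/held durations, lockutils-style."""
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()               # blocks; the elapsed time is the "waited" figure
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    # Usage: with timed_lock("compute_resources", "ResourceTracker.instance_claim"): ...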
[ 1390.755017] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1390.756174] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b8d20e-cc34-4d30-9460-a57e584140f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.859019] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777222, 'name': ReconfigVM_Task, 'duration_secs': 0.322195} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.859019] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 312aed5b-a66e-4428-ac1b-483dc2b38291/312aed5b-a66e-4428-ac1b-483dc2b38291.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1390.859262] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be691f92-e066-4291-9c54-a544d382194e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.865919] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1390.865919] env[62619]: value = "task-1777223" [ 1390.865919] env[62619]: _type = "Task" [ 1390.865919] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.875754] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777223, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.895922] env[62619]: DEBUG nova.compute.manager [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1390.919284] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1391.092381] env[62619]: DEBUG nova.compute.manager [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1391.139293] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df3736f4-dca2-4a33-ae4b-7d0c21552942 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.147965] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f495676-6f12-45c2-a687-03e6c43658a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.179907] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae59a9d0-5b95-46a6-9209-ea8fb3c8ae69 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.188017] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab9f452-3e8d-4066-9672-e064e9c5319f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.206460] env[62619]: DEBUG nova.compute.provider_tree [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1391.282238] env[62619]: INFO nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Took 31.32 seconds to build instance. 
[ 1391.295146] env[62619]: DEBUG nova.network.neutron [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Updating instance_info_cache with network_info: [{"id": "b4a31a23-98c4-445a-8fe4-36fc6013e543", "address": "fa:16:3e:14:33:c3", "network": {"id": "15849cd3-d13d-439e-a637-b0a924a3fc16", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1468882389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f397d7b5794041daad38e58b63d3e8ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4a31a23-98", "ovs_interfaceid": "b4a31a23-98c4-445a-8fe4-36fc6013e543", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1391.356618] env[62619]: DEBUG nova.network.neutron [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Successfully created port: 3e989184-0116-4b59-b0cb-45f895f69e47 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1391.379625] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777223, 'name': Rename_Task, 'duration_secs': 0.214791} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.379931] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1391.381544] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9522a4ed-89c3-46f4-b130-24ab585a5eff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.389512] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1391.389512] env[62619]: value = "task-1777224" [ 1391.389512] env[62619]: _type = "Task" [ 1391.389512] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.397654] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777224, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.421899] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.710194] env[62619]: DEBUG nova.scheduler.client.report [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1391.784239] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "d7b2d831-b2ae-445c-887b-290171ae5d80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.974s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1391.797398] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Releasing lock "refresh_cache-e9ca5148-f188-4a15-83ae-8f3d730b0dab" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.798360] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Instance network_info: |[{"id": "b4a31a23-98c4-445a-8fe4-36fc6013e543", "address": "fa:16:3e:14:33:c3", "network": {"id": "15849cd3-d13d-439e-a637-b0a924a3fc16", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1468882389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f397d7b5794041daad38e58b63d3e8ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4a31a23-98", "ovs_interfaceid": "b4a31a23-98c4-445a-8fe4-36fc6013e543", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1391.798498] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:33:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4a31a23-98c4-445a-8fe4-36fc6013e543', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1391.808184] env[62619]: DEBUG oslo.service.loopingcall [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1391.809408] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1391.809745] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1fc8f55c-8b82-4727-afb1-03f6bd69c4f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.834676] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1391.834676] env[62619]: value = "task-1777225" [ 1391.834676] env[62619]: _type = "Task" [ 1391.834676] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.846218] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777225, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.900276] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777224, 'name': PowerOnVM_Task, 'duration_secs': 0.474109} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.900626] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1391.900766] env[62619]: INFO nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Took 9.63 seconds to spawn the instance on the hypervisor. [ 1391.900938] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1391.901781] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d08346-276a-408d-95f2-76c9e2507a51 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.050409] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Acquiring lock "1847c5d8-16eb-4feb-8a09-24ad6728e59c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.050409] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Lock "1847c5d8-16eb-4feb-8a09-24ad6728e59c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.050409] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Acquiring lock "1847c5d8-16eb-4feb-8a09-24ad6728e59c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.050409] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Lock "1847c5d8-16eb-4feb-8a09-24ad6728e59c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.050783] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Lock "1847c5d8-16eb-4feb-8a09-24ad6728e59c-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.051701] env[62619]: INFO nova.compute.manager [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Terminating instance [ 1392.106191] env[62619]: DEBUG nova.compute.manager [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1392.153021] env[62619]: DEBUG nova.virt.hardware [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1392.153021] env[62619]: DEBUG nova.virt.hardware [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1392.153021] env[62619]: DEBUG nova.virt.hardware [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1392.153504] env[62619]: DEBUG nova.virt.hardware [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1392.153504] env[62619]: DEBUG nova.virt.hardware [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1392.153504] env[62619]: DEBUG nova.virt.hardware [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1392.153504] env[62619]: DEBUG nova.virt.hardware [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1392.153504] env[62619]: DEBUG nova.virt.hardware [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1392.153754] env[62619]: DEBUG nova.virt.hardware [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1392.153754] env[62619]: DEBUG nova.virt.hardware [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1392.153754] env[62619]: DEBUG nova.virt.hardware [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1392.154325] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e92e5d-8306-48cd-8e7e-decc84a5778f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.167227] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9683f30f-a0ca-40d6-beb5-44dd9ae84ad5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.215533] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.138s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.220114] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.303s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.220114] env[62619]: DEBUG nova.objects.instance [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Lazy-loading 'resources' on Instance uuid 
28a8485c-fc0d-4fd0-8be9-37c49caf89b8 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1392.253363] env[62619]: INFO nova.scheduler.client.report [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Deleted allocations for instance e98120b4-7916-4ce4-88ef-0c904852bb1f [ 1392.293620] env[62619]: DEBUG nova.compute.manager [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1392.346375] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777225, 'name': CreateVM_Task, 'duration_secs': 0.457277} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.346375] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1392.346641] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.346733] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.347177] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1392.347900] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b048052c-62fb-4e25-8a85-7e60454cd08a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.353231] env[62619]: DEBUG nova.compute.manager [req-221ed63d-5b57-4752-9333-71f2c8a9934e req-47d6ed31-5083-4be5-9fad-7e3f1415d28e service nova] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Received event network-changed-9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1392.353231] env[62619]: DEBUG nova.compute.manager [req-221ed63d-5b57-4752-9333-71f2c8a9934e req-47d6ed31-5083-4be5-9fad-7e3f1415d28e service nova] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Refreshing instance network info cache due to event network-changed-9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1392.353349] env[62619]: DEBUG oslo_concurrency.lockutils [req-221ed63d-5b57-4752-9333-71f2c8a9934e req-47d6ed31-5083-4be5-9fad-7e3f1415d28e service nova] Acquiring lock "refresh_cache-fb231b38-950e-4c86-bfe5-4c10a304910f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.353858] env[62619]: DEBUG oslo_concurrency.lockutils [req-221ed63d-5b57-4752-9333-71f2c8a9934e req-47d6ed31-5083-4be5-9fad-7e3f1415d28e service nova] Acquired lock "refresh_cache-fb231b38-950e-4c86-bfe5-4c10a304910f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.353858] env[62619]: DEBUG nova.network.neutron [req-221ed63d-5b57-4752-9333-71f2c8a9934e req-47d6ed31-5083-4be5-9fad-7e3f1415d28e service nova] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Refreshing network info cache for port 9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1392.358426] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1392.358426] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528bf866-3f61-32fa-941a-18ce0521d84d" [ 1392.358426] env[62619]: _type = "Task" [ 1392.358426] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.371700] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528bf866-3f61-32fa-941a-18ce0521d84d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.375064] env[62619]: DEBUG nova.compute.manager [req-1d38aca5-e558-4348-a3de-fb4413544d17 req-41d64eb3-f5c1-49d6-a3ba-26e64ef98471 service nova] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Received event network-vif-plugged-b4a31a23-98c4-445a-8fe4-36fc6013e543 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1392.375255] env[62619]: DEBUG oslo_concurrency.lockutils [req-1d38aca5-e558-4348-a3de-fb4413544d17 req-41d64eb3-f5c1-49d6-a3ba-26e64ef98471 service nova] Acquiring lock "e9ca5148-f188-4a15-83ae-8f3d730b0dab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.375574] env[62619]: DEBUG oslo_concurrency.lockutils [req-1d38aca5-e558-4348-a3de-fb4413544d17 req-41d64eb3-f5c1-49d6-a3ba-26e64ef98471 service nova] Lock "e9ca5148-f188-4a15-83ae-8f3d730b0dab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.376389] env[62619]: DEBUG oslo_concurrency.lockutils [req-1d38aca5-e558-4348-a3de-fb4413544d17 req-41d64eb3-f5c1-49d6-a3ba-26e64ef98471 service nova] Lock "e9ca5148-f188-4a15-83ae-8f3d730b0dab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.376504] env[62619]: DEBUG nova.compute.manager [req-1d38aca5-e558-4348-a3de-fb4413544d17 req-41d64eb3-f5c1-49d6-a3ba-26e64ef98471 service nova] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] No waiting events found dispatching network-vif-plugged-b4a31a23-98c4-445a-8fe4-36fc6013e543 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1392.377174] env[62619]: WARNING nova.compute.manager [req-1d38aca5-e558-4348-a3de-fb4413544d17 req-41d64eb3-f5c1-49d6-a3ba-26e64ef98471 service nova] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Received unexpected event network-vif-plugged-b4a31a23-98c4-445a-8fe4-36fc6013e543 for instance with vm_state building and task_state spawning. [ 1392.377174] env[62619]: DEBUG nova.compute.manager [req-1d38aca5-e558-4348-a3de-fb4413544d17 req-41d64eb3-f5c1-49d6-a3ba-26e64ef98471 service nova] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Received event network-changed-b4a31a23-98c4-445a-8fe4-36fc6013e543 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1392.377174] env[62619]: DEBUG nova.compute.manager [req-1d38aca5-e558-4348-a3de-fb4413544d17 req-41d64eb3-f5c1-49d6-a3ba-26e64ef98471 service nova] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Refreshing instance network info cache due to event network-changed-b4a31a23-98c4-445a-8fe4-36fc6013e543. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1392.377413] env[62619]: DEBUG oslo_concurrency.lockutils [req-1d38aca5-e558-4348-a3de-fb4413544d17 req-41d64eb3-f5c1-49d6-a3ba-26e64ef98471 service nova] Acquiring lock "refresh_cache-e9ca5148-f188-4a15-83ae-8f3d730b0dab" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.377413] env[62619]: DEBUG oslo_concurrency.lockutils [req-1d38aca5-e558-4348-a3de-fb4413544d17 req-41d64eb3-f5c1-49d6-a3ba-26e64ef98471 service nova] Acquired lock "refresh_cache-e9ca5148-f188-4a15-83ae-8f3d730b0dab" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.377487] env[62619]: DEBUG nova.network.neutron [req-1d38aca5-e558-4348-a3de-fb4413544d17 req-41d64eb3-f5c1-49d6-a3ba-26e64ef98471 service nova] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Refreshing network info cache for port b4a31a23-98c4-445a-8fe4-36fc6013e543 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1392.424640] env[62619]: INFO nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Took 30.22 seconds to build instance. [ 1392.556064] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Acquiring lock "refresh_cache-1847c5d8-16eb-4feb-8a09-24ad6728e59c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.556786] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Acquired lock "refresh_cache-1847c5d8-16eb-4feb-8a09-24ad6728e59c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.556786] env[62619]: DEBUG nova.network.neutron [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1392.762726] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15a03e3c-ad45-49fa-afce-24077f7d405f tempest-TenantUsagesTestJSON-252562970 tempest-TenantUsagesTestJSON-252562970-project-member] Lock "e98120b4-7916-4ce4-88ef-0c904852bb1f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.934s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.816936] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.878919] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 
tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528bf866-3f61-32fa-941a-18ce0521d84d, 'name': SearchDatastore_Task, 'duration_secs': 0.023263} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.881354] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.881632] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1392.882087] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.886126] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.886394] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1392.886734] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e8d1c73-5284-427f-b2b4-11d84ea16b7e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.902162] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1392.904389] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1392.907980] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4a591aa-05be-4ffb-aa67-3cd415daed2b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.913883] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1392.913883] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529ef30c-1b5b-bae2-f311-bd7364aa610d" [ 1392.913883] env[62619]: _type = "Task" [ 1392.913883] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.924200] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529ef30c-1b5b-bae2-f311-bd7364aa610d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.926664] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "312aed5b-a66e-4428-ac1b-483dc2b38291" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.083s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.110595] env[62619]: DEBUG nova.network.neutron [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1393.289623] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "94c9a119-5c04-4550-b55d-a4a2985385d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.289623] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "94c9a119-5c04-4550-b55d-a4a2985385d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.290694] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b14300c-8d4e-4053-8e6b-291e22d4b0e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.299455] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08fe3c1-cfc5-47d1-9e3a-53572029ee11 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.335153] env[62619]: DEBUG nova.network.neutron [req-1d38aca5-e558-4348-a3de-fb4413544d17 req-41d64eb3-f5c1-49d6-a3ba-26e64ef98471 service nova] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Updated VIF entry in instance network info cache for port b4a31a23-98c4-445a-8fe4-36fc6013e543. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1393.335588] env[62619]: DEBUG nova.network.neutron [req-1d38aca5-e558-4348-a3de-fb4413544d17 req-41d64eb3-f5c1-49d6-a3ba-26e64ef98471 service nova] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Updating instance_info_cache with network_info: [{"id": "b4a31a23-98c4-445a-8fe4-36fc6013e543", "address": "fa:16:3e:14:33:c3", "network": {"id": "15849cd3-d13d-439e-a637-b0a924a3fc16", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1468882389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f397d7b5794041daad38e58b63d3e8ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4a31a23-98", "ovs_interfaceid": "b4a31a23-98c4-445a-8fe4-36fc6013e543", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.341589] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54cd641c-bc3d-48ad-ba61-d991609b72ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.345348] env[62619]: DEBUG nova.network.neutron [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.353910] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b982ca08-b2ab-4cd3-bf5a-6393b6e70333 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.370621] env[62619]: DEBUG nova.compute.provider_tree [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1393.427739] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529ef30c-1b5b-bae2-f311-bd7364aa610d, 'name': SearchDatastore_Task, 'duration_secs': 0.014588} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.428742] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4789ef6-8df9-4764-b24b-8dac0e593bc9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.431027] env[62619]: DEBUG nova.compute.manager [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1393.437334] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1393.437334] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5275adf5-16c0-3893-2532-b4bc7d7ab167" [ 1393.437334] env[62619]: _type = "Task" [ 1393.437334] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.446414] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5275adf5-16c0-3893-2532-b4bc7d7ab167, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.647800] env[62619]: DEBUG nova.network.neutron [req-221ed63d-5b57-4752-9333-71f2c8a9934e req-47d6ed31-5083-4be5-9fad-7e3f1415d28e service nova] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Updated VIF entry in instance network info cache for port 9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1393.647800] env[62619]: DEBUG nova.network.neutron [req-221ed63d-5b57-4752-9333-71f2c8a9934e req-47d6ed31-5083-4be5-9fad-7e3f1415d28e service nova] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Updating instance_info_cache with network_info: [{"id": "9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777", "address": "fa:16:3e:ac:9c:44", "network": {"id": "712c7a9c-8039-44f9-91d1-27991eef432a", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-366397882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7dea13f34f140dd98291849f66720ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e3b6fd0-aa", "ovs_interfaceid": "9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.709352] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.855706] env[62619]: DEBUG oslo_concurrency.lockutils [req-1d38aca5-e558-4348-a3de-fb4413544d17 req-41d64eb3-f5c1-49d6-a3ba-26e64ef98471 service nova] Releasing lock "refresh_cache-e9ca5148-f188-4a15-83ae-8f3d730b0dab" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.855706] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Releasing lock "refresh_cache-1847c5d8-16eb-4feb-8a09-24ad6728e59c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.855706] env[62619]: DEBUG nova.compute.manager [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1393.855706] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1393.855706] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3de43b0-8bab-446b-86a4-a41235c2506c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.860922] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1393.861215] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52283da4-b32d-455e-b42a-2f8ee34d681d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.871229] env[62619]: DEBUG oslo_vmware.api [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Waiting for the task: (returnval){ [ 1393.871229] env[62619]: value = "task-1777226" [ 1393.871229] env[62619]: _type = "Task" [ 1393.871229] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.876403] env[62619]: DEBUG nova.scheduler.client.report [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1393.889069] env[62619]: DEBUG oslo_vmware.api [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777226, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.956672] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5275adf5-16c0-3893-2532-b4bc7d7ab167, 'name': SearchDatastore_Task, 'duration_secs': 0.019384} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.956672] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.956672] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e9ca5148-f188-4a15-83ae-8f3d730b0dab/e9ca5148-f188-4a15-83ae-8f3d730b0dab.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1393.956672] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0510952a-5640-4b6c-9b41-cf1d8a9a1f2c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.966096] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1393.966096] env[62619]: value = "task-1777227" [ 1393.966096] env[62619]: _type = "Task" [ 1393.966096] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.966096] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.980837] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777227, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.007661] env[62619]: DEBUG nova.network.neutron [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Successfully updated port: 3e989184-0116-4b59-b0cb-45f895f69e47 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1394.151800] env[62619]: DEBUG oslo_concurrency.lockutils [req-221ed63d-5b57-4752-9333-71f2c8a9934e req-47d6ed31-5083-4be5-9fad-7e3f1415d28e service nova] Releasing lock "refresh_cache-fb231b38-950e-4c86-bfe5-4c10a304910f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1394.385157] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.166s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.387273] env[62619]: DEBUG oslo_vmware.api [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777226, 'name': PowerOffVM_Task, 'duration_secs': 0.25865} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.389935] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.051s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.389935] env[62619]: DEBUG nova.objects.instance [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Lazy-loading 'resources' on Instance uuid b334cb41-5ddf-4545-8e2a-97c4d1de7cbf {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1394.391286] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1394.391286] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1394.391286] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f1bd0d8-93ca-477d-a4b8-3c9a77d3fd97 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.418993] env[62619]: INFO nova.scheduler.client.report [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce 
tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Deleted allocations for instance 28a8485c-fc0d-4fd0-8be9-37c49caf89b8 [ 1394.429760] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1394.429990] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1394.431037] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Deleting the datastore file [datastore1] 1847c5d8-16eb-4feb-8a09-24ad6728e59c {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1394.431368] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b251a6ca-2e2c-4586-a183-661acd27cea0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.446443] env[62619]: DEBUG oslo_vmware.api [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Waiting for the task: (returnval){ [ 1394.446443] env[62619]: value = "task-1777229" [ 1394.446443] env[62619]: _type = "Task" [ 1394.446443] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.460400] env[62619]: DEBUG oslo_vmware.api [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777229, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.478257] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777227, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.512147] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquiring lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.512253] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquired lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.512404] env[62619]: DEBUG nova.network.neutron [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1394.704548] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.940524] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b7bed78-0285-4463-b9f5-4a2d47d543ce tempest-ImagesNegativeTestJSON-72724437 tempest-ImagesNegativeTestJSON-72724437-project-member] Lock "28a8485c-fc0d-4fd0-8be9-37c49caf89b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.151s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.958027] env[62619]: DEBUG oslo_vmware.api [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Task: {'id': task-1777229, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288078} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.960761] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1394.961043] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1394.962235] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1394.962435] env[62619]: INFO nova.compute.manager [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1394.962710] env[62619]: DEBUG oslo.service.loopingcall [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1394.966521] env[62619]: DEBUG nova.compute.manager [-] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1394.966627] env[62619]: DEBUG nova.network.neutron [-] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1394.978435] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777227, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.721205} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.981488] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e9ca5148-f188-4a15-83ae-8f3d730b0dab/e9ca5148-f188-4a15-83ae-8f3d730b0dab.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1394.982180] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1394.982860] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-32f2d085-e537-4251-ac8c-c4fe910010e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.990967] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1394.990967] env[62619]: value = "task-1777230" [ 1394.990967] env[62619]: _type = "Task" [ 1394.990967] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.999255] env[62619]: DEBUG nova.network.neutron [-] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1395.009221] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777230, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.218020] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1395.250773] env[62619]: DEBUG nova.network.neutron [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1395.286456] env[62619]: DEBUG nova.compute.manager [req-d2cfd787-a710-40df-a556-0ab8adf8904c req-e5767d97-784d-44b2-833b-6eb2b6b70ec4 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Received event network-vif-plugged-3e989184-0116-4b59-b0cb-45f895f69e47 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1395.286692] env[62619]: DEBUG oslo_concurrency.lockutils [req-d2cfd787-a710-40df-a556-0ab8adf8904c req-e5767d97-784d-44b2-833b-6eb2b6b70ec4 service nova] Acquiring lock "d16bebd1-a144-4d73-8eb6-8ab12a08fe69-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.286907] env[62619]: DEBUG oslo_concurrency.lockutils [req-d2cfd787-a710-40df-a556-0ab8adf8904c req-e5767d97-784d-44b2-833b-6eb2b6b70ec4 service nova] Lock "d16bebd1-a144-4d73-8eb6-8ab12a08fe69-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.287141] env[62619]: DEBUG oslo_concurrency.lockutils [req-d2cfd787-a710-40df-a556-0ab8adf8904c req-e5767d97-784d-44b2-833b-6eb2b6b70ec4 service nova] Lock "d16bebd1-a144-4d73-8eb6-8ab12a08fe69-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1395.287363] env[62619]: DEBUG nova.compute.manager [req-d2cfd787-a710-40df-a556-0ab8adf8904c req-e5767d97-784d-44b2-833b-6eb2b6b70ec4 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] No waiting events found dispatching network-vif-plugged-3e989184-0116-4b59-b0cb-45f895f69e47 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1395.287569] env[62619]: WARNING nova.compute.manager [req-d2cfd787-a710-40df-a556-0ab8adf8904c req-e5767d97-784d-44b2-833b-6eb2b6b70ec4 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Received unexpected event network-vif-plugged-3e989184-0116-4b59-b0cb-45f895f69e47 for instance with vm_state building and task_state spawning. [ 1395.287569] env[62619]: DEBUG nova.compute.manager [req-d2cfd787-a710-40df-a556-0ab8adf8904c req-e5767d97-784d-44b2-833b-6eb2b6b70ec4 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Received event network-changed-3e989184-0116-4b59-b0cb-45f895f69e47 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1395.287569] env[62619]: DEBUG nova.compute.manager [req-d2cfd787-a710-40df-a556-0ab8adf8904c req-e5767d97-784d-44b2-833b-6eb2b6b70ec4 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Refreshing instance network info cache due to event network-changed-3e989184-0116-4b59-b0cb-45f895f69e47. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1395.287569] env[62619]: DEBUG oslo_concurrency.lockutils [req-d2cfd787-a710-40df-a556-0ab8adf8904c req-e5767d97-784d-44b2-833b-6eb2b6b70ec4 service nova] Acquiring lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.452228] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28cbc92-c35a-4033-8f98-419e4da25120 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.460299] env[62619]: DEBUG nova.network.neutron [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Updating instance_info_cache with network_info: [{"id": "3e989184-0116-4b59-b0cb-45f895f69e47", "address": "fa:16:3e:69:5f:b0", "network": {"id": "458b9515-f6cd-49e3-b6f2-594543a98f0d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1747794949-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "22106ed527594810885b6891b382c3ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e989184-01", "ovs_interfaceid": "3e989184-0116-4b59-b0cb-45f895f69e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.462223] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39edddf8-e22b-4754-a40b-1a471a1bc718 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.498309] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1ac21f-96c0-4b69-90ca-49b5e3e43873 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.502174] env[62619]: DEBUG nova.network.neutron [-] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.509901] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a81751-1fae-41f0-9862-50627d8387a0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.513577] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': 
task-1777230, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.286302} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.514503] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1395.516740] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0a2af0-41a7-4fd4-a45f-45b858e4a472 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.528691] env[62619]: DEBUG nova.compute.provider_tree [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1395.550336] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] e9ca5148-f188-4a15-83ae-8f3d730b0dab/e9ca5148-f188-4a15-83ae-8f3d730b0dab.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1395.552184] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23e08d03-408b-4ff9-9928-171d757ae9f2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.573255] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1395.573255] env[62619]: value = "task-1777231" [ 1395.573255] env[62619]: _type = "Task" [ 1395.573255] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.581707] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777231, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.709163] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1395.970023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Releasing lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.970023] env[62619]: DEBUG nova.compute.manager [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Instance network_info: |[{"id": "3e989184-0116-4b59-b0cb-45f895f69e47", "address": "fa:16:3e:69:5f:b0", "network": {"id": "458b9515-f6cd-49e3-b6f2-594543a98f0d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1747794949-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "22106ed527594810885b6891b382c3ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e989184-01", "ovs_interfaceid": "3e989184-0116-4b59-b0cb-45f895f69e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1395.970460] env[62619]: DEBUG oslo_concurrency.lockutils [req-d2cfd787-a710-40df-a556-0ab8adf8904c req-e5767d97-784d-44b2-833b-6eb2b6b70ec4 service nova] Acquired lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.970460] env[62619]: DEBUG nova.network.neutron [req-d2cfd787-a710-40df-a556-0ab8adf8904c req-e5767d97-784d-44b2-833b-6eb2b6b70ec4 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Refreshing network info cache for port 3e989184-0116-4b59-b0cb-45f895f69e47 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1395.970460] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:5f:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '360308f4-9d0a-4ec2-8bcf-44891f452847', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'3e989184-0116-4b59-b0cb-45f895f69e47', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1395.982879] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Creating folder: Project (22106ed527594810885b6891b382c3ce). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1395.984436] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0de217f6-d1a2-421b-918c-4c95a1ba1bb4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.998463] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Created folder: Project (22106ed527594810885b6891b382c3ce) in parent group-v368875. [ 1395.998463] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Creating folder: Instances. Parent ref: group-v368926. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1395.998463] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-43f1c56d-c075-48b3-a810-515bf5bf83c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.010124] env[62619]: INFO nova.compute.manager [-] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Took 1.04 seconds to deallocate network for instance. [ 1396.012097] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Created folder: Instances in parent group-v368926. [ 1396.012097] env[62619]: DEBUG oslo.service.loopingcall [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1396.013084] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1396.013394] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ead94f5e-399a-4150-9361-8810d4e67a55 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.034304] env[62619]: DEBUG nova.scheduler.client.report [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1396.043261] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1396.043261] env[62619]: value = "task-1777234" [ 1396.043261] env[62619]: _type = "Task" [ 1396.043261] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.055085] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777234, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.084792] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777231, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.539481] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.539481] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.150s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.541491] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.982s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.542098] env[62619]: DEBUG nova.objects.instance [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Lazy-loading 'resources' on Instance uuid e0e25ddd-3692-480f-bfa0-212741c0d882 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1396.557423] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777234, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.559261] env[62619]: INFO nova.scheduler.client.report [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Deleted allocations for instance b334cb41-5ddf-4545-8e2a-97c4d1de7cbf [ 1396.583865] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777231, 'name': ReconfigVM_Task, 'duration_secs': 0.988661} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.584435] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Reconfigured VM instance instance-00000012 to attach disk [datastore1] e9ca5148-f188-4a15-83ae-8f3d730b0dab/e9ca5148-f188-4a15-83ae-8f3d730b0dab.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1396.585178] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e96f10ef-3e00-4a6c-90eb-4a2447717d41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.595193] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1396.595193] env[62619]: value = "task-1777235" [ 1396.595193] env[62619]: _type = "Task" [ 1396.595193] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.602286] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777235, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.709637] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Acquiring lock "f46de981-1f04-4baf-874c-de1b95d16f9d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.709718] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Lock "f46de981-1f04-4baf-874c-de1b95d16f9d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.710065] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Acquiring lock "f46de981-1f04-4baf-874c-de1b95d16f9d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.710065] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Lock "f46de981-1f04-4baf-874c-de1b95d16f9d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.710320] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Lock "f46de981-1f04-4baf-874c-de1b95d16f9d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.711864] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1396.712030] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1396.712172] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1396.714407] env[62619]: INFO nova.compute.manager [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Terminating instance [ 1396.882355] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "9014ef05-64d1-4bd6-9f2e-db58003b6520" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.882923] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "9014ef05-64d1-4bd6-9f2e-db58003b6520" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.954856] env[62619]: DEBUG nova.network.neutron [req-d2cfd787-a710-40df-a556-0ab8adf8904c req-e5767d97-784d-44b2-833b-6eb2b6b70ec4 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Updated VIF entry in instance network info cache for port 3e989184-0116-4b59-b0cb-45f895f69e47. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1396.955755] env[62619]: DEBUG nova.network.neutron [req-d2cfd787-a710-40df-a556-0ab8adf8904c req-e5767d97-784d-44b2-833b-6eb2b6b70ec4 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Updating instance_info_cache with network_info: [{"id": "3e989184-0116-4b59-b0cb-45f895f69e47", "address": "fa:16:3e:69:5f:b0", "network": {"id": "458b9515-f6cd-49e3-b6f2-594543a98f0d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1747794949-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "22106ed527594810885b6891b382c3ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e989184-01", "ovs_interfaceid": "3e989184-0116-4b59-b0cb-45f895f69e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.056829] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777234, 'name': CreateVM_Task, 'duration_secs': 0.55453} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.057450] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1397.057930] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.058471] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.058921] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1397.059243] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-375e0d2b-069f-4939-bf0e-b3d4034121ad {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.071932] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1397.071932] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52db94f0-7488-dfcd-903d-449a6dc0b949" [ 1397.071932] env[62619]: _type = "Task" [ 1397.071932] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.072636] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa96bdd-caef-4cb5-a3f8-dcce4358d0c2 tempest-InstanceActionsTestJSON-1460799651 tempest-InstanceActionsTestJSON-1460799651-project-member] Lock "b334cb41-5ddf-4545-8e2a-97c4d1de7cbf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.135s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.091209] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52db94f0-7488-dfcd-903d-449a6dc0b949, 'name': SearchDatastore_Task, 'duration_secs': 0.011496} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.091512] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.092303] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1397.093048] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.093048] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.093932] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1397.093932] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9bc0a98d-aa58-4551-89f6-0115900038b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.107701] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777235, 'name': Rename_Task, 'duration_secs': 0.170048} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.108840] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1397.109125] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1397.109285] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1397.109964] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac078b0e-871d-40e4-ad75-b497bb0f1407 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.111503] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d46c269-acc6-40ce-945a-719b9a0a9fe7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.121727] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1397.121727] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525e1bfe-83c3-6b80-4962-439b91a9c946" [ 1397.121727] env[62619]: _type = "Task" [ 1397.121727] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.124796] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1397.124796] env[62619]: value = "task-1777236" [ 1397.124796] env[62619]: _type = "Task" [ 1397.124796] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.132582] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525e1bfe-83c3-6b80-4962-439b91a9c946, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.139609] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777236, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.219489] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Skipping network cache update for instance because it is being deleted. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10326}} [ 1397.219866] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10322}} [ 1397.219866] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10322}} [ 1397.224108] env[62619]: DEBUG nova.compute.manager [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1397.224108] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1397.224108] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5a831a-c9db-4550-88f6-e73ae5894028 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.232075] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1397.235898] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55ee0a3d-3ec5-4d7b-948b-4d24f7a234d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.242354] env[62619]: DEBUG oslo_vmware.api [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Waiting for the task: (returnval){ [ 1397.242354] env[62619]: value = "task-1777237" [ 1397.242354] env[62619]: _type = "Task" [ 1397.242354] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.250715] env[62619]: DEBUG oslo_vmware.api [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777237, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.280903] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "refresh_cache-08c1fef9-40fc-4420-91de-fe911dea70f7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.281068] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquired lock "refresh_cache-08c1fef9-40fc-4420-91de-fe911dea70f7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.281214] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Forcefully refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1397.281377] env[62619]: DEBUG nova.objects.instance [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lazy-loading 'info_cache' on Instance uuid 08c1fef9-40fc-4420-91de-fe911dea70f7 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1397.459794] env[62619]: DEBUG oslo_concurrency.lockutils [req-d2cfd787-a710-40df-a556-0ab8adf8904c req-e5767d97-784d-44b2-833b-6eb2b6b70ec4 service nova] Releasing lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.563235] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921a0c54-ad61-4a4f-91a6-15437dff2849 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.579596] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5133a738-767c-4e5b-b594-2ca2f0d12e06 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.616869] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8ad731-413e-4246-9b42-24cc47c5fe42 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.634636] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734017b8-2ec0-473a-8e45-65bc4c312269 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.643465] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525e1bfe-83c3-6b80-4962-439b91a9c946, 'name': SearchDatastore_Task, 'duration_secs': 0.018802} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.645025] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef679678-b6a8-4d29-8d85-d08721fb10bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.661450] env[62619]: DEBUG nova.compute.provider_tree [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1397.662985] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777236, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.667858] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1397.667858] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521ffe79-4ddb-7de0-fc7e-dc8200a07bb8" [ 1397.667858] env[62619]: _type = "Task" [ 1397.667858] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.675269] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521ffe79-4ddb-7de0-fc7e-dc8200a07bb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.753874] env[62619]: DEBUG oslo_vmware.api [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777237, 'name': PowerOffVM_Task, 'duration_secs': 0.355661} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.754281] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1397.754529] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1397.754844] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e295b3f-764f-4e79-b68c-a1eace10a574 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.831089] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1397.831382] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1397.831652] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Deleting the datastore file [datastore1] f46de981-1f04-4baf-874c-de1b95d16f9d {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1397.831919] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b971b06-8e63-48ba-b305-6af19bf74498 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.838783] env[62619]: DEBUG oslo_vmware.api [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Waiting for the task: (returnval){ [ 1397.838783] env[62619]: value = "task-1777239" [ 1397.838783] env[62619]: _type = "Task" [ 1397.838783] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.847592] env[62619]: DEBUG oslo_vmware.api [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777239, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.141492] env[62619]: DEBUG oslo_vmware.api [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777236, 'name': PowerOnVM_Task, 'duration_secs': 0.86388} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.142097] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1398.142097] env[62619]: INFO nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Took 10.90 seconds to spawn the instance on the hypervisor. [ 1398.142278] env[62619]: DEBUG nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1398.143131] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c94df7a-4dfe-4434-99ed-9964c5fffc76 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.182208] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521ffe79-4ddb-7de0-fc7e-dc8200a07bb8, 'name': SearchDatastore_Task, 'duration_secs': 0.011435} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.182579] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1398.182833] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] d16bebd1-a144-4d73-8eb6-8ab12a08fe69/d16bebd1-a144-4d73-8eb6-8ab12a08fe69.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1398.183122] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc4768da-5b91-43ef-8ddd-926821b14aef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.190714] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1398.190714] env[62619]: value = "task-1777240" [ 1398.190714] env[62619]: _type = "Task" [ 1398.190714] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.201272] env[62619]: ERROR nova.scheduler.client.report [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] [req-a3268afb-da57-4afa-ae81-246b5518f1cd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a3268afb-da57-4afa-ae81-246b5518f1cd"}]} [ 1398.207710] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777240, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.229478] env[62619]: DEBUG nova.scheduler.client.report [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1398.257679] env[62619]: DEBUG nova.scheduler.client.report [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1398.257869] env[62619]: DEBUG nova.compute.provider_tree [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1398.279288] env[62619]: DEBUG nova.scheduler.client.report [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1398.309830] env[62619]: DEBUG nova.scheduler.client.report [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1398.351591] env[62619]: DEBUG oslo_vmware.api [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Task: {'id': task-1777239, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.263402} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.353774] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1398.353774] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1398.353774] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1398.353774] env[62619]: INFO nova.compute.manager [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1398.353774] env[62619]: DEBUG oslo.service.loopingcall [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1398.353995] env[62619]: DEBUG nova.compute.manager [-] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1398.353995] env[62619]: DEBUG nova.network.neutron [-] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1398.523367] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1398.674733] env[62619]: INFO nova.compute.manager [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Took 33.77 seconds to build instance. 
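[annotation] The req-2dc1217c entries above show a placement inventory update rejected with 409 "placement.concurrent_update", after which the report client refreshes inventories, aggregates, and traits for provider e814b747-ed75-487b-a97d-acf66bc6db0b and retries. A minimal sketch of that refresh-and-retry pattern against the placement REST API follows; the PLACEMENT_URL, token, and retry count are illustrative assumptions, not Nova's actual report-client code.

```python
# Illustrative sketch only: retry an inventory PUT after a generation conflict.
# PLACEMENT_URL and the auth token are assumed placeholders, not values from this log.
import requests

PLACEMENT_URL = "http://placement.example/placement"  # assumption
HEADERS = {
    "X-Auth-Token": "TOKEN",                           # assumption
    "OpenStack-API-Version": "placement 1.26",
}

def put_inventories(rp_uuid, inventories, retries=3):
    """PUT the full inventory set, re-reading the provider generation on 409."""
    for _ in range(retries):
        # Read current inventories to learn the resource provider generation.
        cur = requests.get(
            f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
            headers=HEADERS).json()
        body = {
            "resource_provider_generation": cur["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(
            f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
            json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the generation;
        # loop to refresh the generation and try again, as the log shows Nova doing.
    raise RuntimeError("gave up after repeated generation conflicts")
```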
[ 1399.411417] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c9e89d3-0d7a-4f83-9cbc-3e1ca3e4aea6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "e9ca5148-f188-4a15-83ae-8f3d730b0dab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.538s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.411680] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777240, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.412647] env[62619]: WARNING oslo_vmware.common.loopingcall [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] task run outlasted interval by 0.22006099999999995 sec [ 1399.420465] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Acquiring lock "40eeb844-7423-4818-8095-81062c7e6392" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.422525] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Lock "40eeb844-7423-4818-8095-81062c7e6392" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.431279] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777240, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533518} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.431485] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] d16bebd1-a144-4d73-8eb6-8ab12a08fe69/d16bebd1-a144-4d73-8eb6-8ab12a08fe69.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1399.431722] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1399.431979] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e38c3a81-8931-478a-8ae4-7fbd0fb2d65b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.441219] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1399.441219] env[62619]: value = "task-1777241" [ 1399.441219] env[62619]: _type = "Task" [ 1399.441219] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.457380] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777241, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.587751] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7f9d2e-3f53-4a49-a16e-b5113f1a2aea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.597026] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384a881d-e545-421a-ac2f-21517c74e89e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.630130] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.632632] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e2d720-87b2-4659-827a-7cbcc6fc0c7e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.642290] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6774f43-e681-43d3-a323-3da74a25a5de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.660611] env[62619]: DEBUG nova.compute.provider_tree [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1399.923703] env[62619]: DEBUG nova.network.neutron [-] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.925026] env[62619]: DEBUG nova.compute.manager [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1399.951407] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777241, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071794} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.951686] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1399.952460] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd73e00c-a94a-4d3a-8121-ce83bcde0fd9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.975400] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] d16bebd1-a144-4d73-8eb6-8ab12a08fe69/d16bebd1-a144-4d73-8eb6-8ab12a08fe69.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1399.976442] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8aab5a2e-2a2b-4555-bc44-3275f97c85ad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.996749] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1399.996749] env[62619]: value = "task-1777242" [ 1399.996749] env[62619]: _type = "Task" [ 1399.996749] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.008427] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777242, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.078363] env[62619]: DEBUG nova.compute.manager [req-00673ba7-0123-4c3a-af53-622581a271a6 req-0b08e503-836b-4476-95cc-6f56c389eef4 service nova] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Received event network-vif-deleted-cfe84dde-a706-4b94-9382-35d5056a97d2 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1400.138977] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Releasing lock "refresh_cache-08c1fef9-40fc-4420-91de-fe911dea70f7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.138977] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 1400.138977] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1400.138977] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1400.138977] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1400.168084] env[62619]: DEBUG nova.scheduler.client.report [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1400.431551] env[62619]: INFO nova.compute.manager [-] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Took 2.08 seconds to deallocate network for instance. 
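[annotation] The long-running vCenter operations in this section (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, DeleteDatastoreFile_Task, PowerOnVM_Task, ...) are all driven by the same wait_for_task / _poll_task loop: submit the task, poll its progress (the 0%/66%/89% lines), and return once it completes with a duration. A generic sketch of that polling pattern is below; poll_task_info, the interval, and the timeout are hypothetical stand-ins, not oslo.vmware's actual implementation.

```python
# Generic illustration of the poll-until-done pattern seen in the task log entries.
# poll_task_info() is a hypothetical callable returning (state, progress, error);
# it stands in for the property reads oslo.vmware performs against vCenter.
import time

def wait_for_task(poll_task_info, interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it succeeds, fails, or times out."""
    start = time.monotonic()
    while True:
        state, progress, error = poll_task_info()
        if state == "success":
            return time.monotonic() - start          # comparable to duration_secs above
        if state == "error":
            raise RuntimeError(f"task failed: {error}")
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete in time")
        # Still queued/running: report progress and poll again after the interval.
        print(f"task progress is {progress}%")
        time.sleep(interval)
```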
[ 1400.464243] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.513954] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777242, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.641526] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.676890] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.135s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.681096] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.598s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.684366] env[62619]: INFO nova.compute.claims [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1400.723824] env[62619]: INFO nova.scheduler.client.report [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Deleted allocations for instance e0e25ddd-3692-480f-bfa0-212741c0d882 [ 1400.958757] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.012321] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777242, 'name': ReconfigVM_Task, 'duration_secs': 0.692918} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.012321] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Reconfigured VM instance instance-00000013 to attach disk [datastore1] d16bebd1-a144-4d73-8eb6-8ab12a08fe69/d16bebd1-a144-4d73-8eb6-8ab12a08fe69.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1401.012321] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-130bef47-37a0-4c6b-a26e-3e2f958d1d03 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.017861] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1401.017861] env[62619]: value = "task-1777243" [ 1401.017861] env[62619]: _type = "Task" [ 1401.017861] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.026129] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777243, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.239030] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc1217c-9fbc-480e-b9e4-055c60e5de93 tempest-InstanceActionsV221TestJSON-2057265891 tempest-InstanceActionsV221TestJSON-2057265891-project-member] Lock "e0e25ddd-3692-480f-bfa0-212741c0d882" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.018s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.353590] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "d7b2d831-b2ae-445c-887b-290171ae5d80" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.353590] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "d7b2d831-b2ae-445c-887b-290171ae5d80" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.353590] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "d7b2d831-b2ae-445c-887b-290171ae5d80-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.353590] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "d7b2d831-b2ae-445c-887b-290171ae5d80-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.353772] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "d7b2d831-b2ae-445c-887b-290171ae5d80-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.356204] env[62619]: INFO nova.compute.manager [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Terminating instance [ 1401.530683] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777243, 'name': Rename_Task, 'duration_secs': 0.309539} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.531051] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1401.531637] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-611a6dd9-4361-4c3f-88ad-1bd926f6e5e2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.539058] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1401.539058] env[62619]: value = "task-1777244" [ 1401.539058] env[62619]: _type = "Task" [ 1401.539058] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.547245] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777244, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.862564] env[62619]: DEBUG nova.compute.manager [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1401.862827] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1401.863754] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd7318b-89e1-473d-897d-faf45f94dd8c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.878785] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1401.879074] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a64f7b3b-9c3f-479f-85c6-87a20b0d6715 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.890761] env[62619]: DEBUG oslo_vmware.api [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1401.890761] env[62619]: value = "task-1777245" [ 1401.890761] env[62619]: _type = "Task" [ 1401.890761] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.911029] env[62619]: DEBUG oslo_vmware.api [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777245, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.049520] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777244, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.277757] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2236f4d3-f7c3-459c-a878-c99c39d55439 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.287373] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84215c16-4dac-4ce7-8b03-6d0d636f3e55 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.330891] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130b97d7-8241-457b-b704-ba335da3bccc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.339398] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c68c72-8dc7-420e-8335-97939eb526d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.355148] env[62619]: DEBUG nova.compute.provider_tree [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1402.409398] env[62619]: DEBUG oslo_vmware.api [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777245, 'name': PowerOffVM_Task, 'duration_secs': 0.413066} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.409925] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1402.410288] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1402.410723] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd829bcf-2947-4576-9aa8-02bc394c0b8b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.491116] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1402.491116] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1402.491116] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Deleting the datastore file [datastore1] d7b2d831-b2ae-445c-887b-290171ae5d80 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1402.491116] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0941207-4717-4e34-99b7-16b36387fa60 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.501286] env[62619]: DEBUG oslo_vmware.api [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1402.501286] env[62619]: value = "task-1777247" [ 1402.501286] env[62619]: _type = "Task" [ 1402.501286] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.514466] env[62619]: DEBUG oslo_vmware.api [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777247, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.553187] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777244, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.859591] env[62619]: DEBUG nova.scheduler.client.report [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1403.015810] env[62619]: DEBUG oslo_vmware.api [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777247, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19913} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.018204] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1403.018204] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1403.018204] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1403.018204] env[62619]: INFO nova.compute.manager [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1403.018204] env[62619]: DEBUG oslo.service.loopingcall [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1403.018622] env[62619]: DEBUG nova.compute.manager [-] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1403.018622] env[62619]: DEBUG nova.network.neutron [-] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1403.050614] env[62619]: DEBUG oslo_vmware.api [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777244, 'name': PowerOnVM_Task, 'duration_secs': 1.383046} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.051100] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1403.051393] env[62619]: INFO nova.compute.manager [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Took 10.95 seconds to spawn the instance on the hypervisor. [ 1403.051681] env[62619]: DEBUG nova.compute.manager [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1403.052749] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05be8e73-e006-4827-90b5-0aa6e1c1e7e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.370770] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.371306] env[62619]: DEBUG nova.compute.manager [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1403.374033] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.610s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.375277] env[62619]: INFO nova.compute.claims [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1403.576261] env[62619]: INFO nova.compute.manager [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Took 34.44 seconds to build instance. [ 1403.881702] env[62619]: DEBUG nova.compute.utils [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1403.884456] env[62619]: DEBUG nova.compute.manager [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Not allocating networking since 'none' was specified. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1404.080044] env[62619]: DEBUG oslo_concurrency.lockutils [None req-118ea0f3-0a5f-4b2c-91fc-6013989266e3 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Lock "d16bebd1-a144-4d73-8eb6-8ab12a08fe69" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.383s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.386148] env[62619]: DEBUG nova.compute.manager [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1404.510711] env[62619]: DEBUG nova.network.neutron [-] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.582959] env[62619]: DEBUG nova.compute.manager [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1404.661107] env[62619]: DEBUG nova.compute.manager [req-2ddce0eb-0754-4bdc-abd2-3276687ed33d req-664087f8-14b1-4031-9cfc-e4e44d0630de service nova] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Received event network-vif-deleted-4bd598be-9cdb-42cd-a9b0-53e9eb3622ec {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1405.012744] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b26bdfd-5325-48b9-8fc2-458d4fbd1981 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.015693] env[62619]: INFO nova.compute.manager [-] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Took 2.00 seconds to deallocate network for instance. [ 1405.024012] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6c6622-a673-4333-9030-308f2e4fdee3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.069803] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d9feea-d0d1-4fb7-a1d8-f26e66af7d88 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.078990] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510c3ec1-b518-4a84-a5a3-5a8206115f03 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.100319] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Acquiring lock "597c0f95-5798-4022-8e2e-89a700698d7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.101677] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Lock "597c0f95-5798-4022-8e2e-89a700698d7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.103094] env[62619]: DEBUG nova.compute.provider_tree [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1405.131038] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.412218] env[62619]: DEBUG nova.compute.manager [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 
tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1405.440963] env[62619]: DEBUG nova.virt.hardware [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1405.442039] env[62619]: DEBUG nova.virt.hardware [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1405.442039] env[62619]: DEBUG nova.virt.hardware [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1405.442039] env[62619]: DEBUG nova.virt.hardware [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1405.442039] env[62619]: DEBUG nova.virt.hardware [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1405.442039] env[62619]: DEBUG nova.virt.hardware [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1405.442214] env[62619]: DEBUG nova.virt.hardware [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1405.442894] env[62619]: DEBUG nova.virt.hardware [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1405.442894] env[62619]: DEBUG nova.virt.hardware [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1405.443097] env[62619]: DEBUG nova.virt.hardware [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1405.443276] env[62619]: DEBUG nova.virt.hardware [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1405.445120] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0397d246-93cb-4a6b-9f47-64e89677fdd7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.456712] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3da93c8-2c03-4396-9a5c-24e651c14408 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.476798] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1405.481774] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Creating folder: Project (6f2cb2dacaef4ca687a28a5569d9bec7). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1405.482600] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c28e35f7-6982-42e9-badd-296af4c3f2cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.494856] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Created folder: Project (6f2cb2dacaef4ca687a28a5569d9bec7) in parent group-v368875. [ 1405.495080] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Creating folder: Instances. Parent ref: group-v368929. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1405.495331] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c891299-60a9-416a-b5ab-45f519b3acf7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.507947] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Created folder: Instances in parent group-v368929. [ 1405.508236] env[62619]: DEBUG oslo.service.loopingcall [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1405.508424] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1405.508640] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e78cc6d4-950b-4805-a49c-dbfd442b73f2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.524610] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.527056] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1405.527056] env[62619]: value = "task-1777250" [ 1405.527056] env[62619]: _type = "Task" [ 1405.527056] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.535897] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777250, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.610268] env[62619]: DEBUG nova.scheduler.client.report [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1405.904978] env[62619]: INFO nova.compute.manager [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Rescuing [ 1405.905275] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquiring lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.905528] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquired lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.905628] env[62619]: DEBUG nova.network.neutron [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1406.039258] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777250, 'name': CreateVM_Task, 'duration_secs': 0.455604} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.039869] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1406.040064] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1406.041197] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.041197] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1406.041197] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3fea39f-ce14-4400-9528-fe11c4a2b539 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.046533] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1406.046533] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5290dd11-43f4-84c8-9210-e988cbe20ea2" [ 1406.046533] env[62619]: _type = "Task" [ 1406.046533] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.056227] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5290dd11-43f4-84c8-9210-e988cbe20ea2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.115202] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.741s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.115406] env[62619]: DEBUG nova.compute.manager [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1406.122283] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.820s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1406.123842] env[62619]: INFO nova.compute.claims [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1406.561153] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5290dd11-43f4-84c8-9210-e988cbe20ea2, 'name': SearchDatastore_Task, 'duration_secs': 0.040696} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.561153] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1406.561153] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1406.561153] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1406.561365] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.561365] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1406.561365] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d4e7206-d5fc-47e3-bc44-623fffea497c {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.574617] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1406.574959] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1406.575800] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad3d9d18-54d5-4220-bdca-e17e3f4e222f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.581890] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1406.581890] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e8c025-9d39-14a2-6819-d3fbdd904299" [ 1406.581890] env[62619]: _type = "Task" [ 1406.581890] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.590558] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e8c025-9d39-14a2-6819-d3fbdd904299, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.631337] env[62619]: DEBUG nova.compute.utils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1406.634265] env[62619]: DEBUG nova.compute.manager [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1406.634265] env[62619]: DEBUG nova.network.neutron [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1406.779728] env[62619]: DEBUG nova.policy [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1ca53373828a486683f98809983c7751', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '064c40929d5b478eb56dc54fcb46bc21', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1407.092032] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e8c025-9d39-14a2-6819-d3fbdd904299, 'name': SearchDatastore_Task, 'duration_secs': 0.041976} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.092786] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-244821c7-b84c-4be1-a427-8ef5e4748d41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.098641] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1407.098641] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5276d59d-1720-fa55-bfb9-fda4212d4d23" [ 1407.098641] env[62619]: _type = "Task" [ 1407.098641] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.108255] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5276d59d-1720-fa55-bfb9-fda4212d4d23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.141330] env[62619]: DEBUG nova.compute.manager [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1407.204391] env[62619]: DEBUG nova.network.neutron [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Updating instance_info_cache with network_info: [{"id": "3e989184-0116-4b59-b0cb-45f895f69e47", "address": "fa:16:3e:69:5f:b0", "network": {"id": "458b9515-f6cd-49e3-b6f2-594543a98f0d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1747794949-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "22106ed527594810885b6891b382c3ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e989184-01", "ovs_interfaceid": "3e989184-0116-4b59-b0cb-45f895f69e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1407.381877] env[62619]: DEBUG nova.network.neutron [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Successfully created port: 902ccd9e-7453-4859-8766-73a3449e946d {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1407.611526] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5276d59d-1720-fa55-bfb9-fda4212d4d23, 'name': SearchDatastore_Task, 'duration_secs': 0.012579} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.614266] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.614567] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e81c03f7-9c0e-46bd-9641-aced82038eca/e81c03f7-9c0e-46bd-9641-aced82038eca.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1407.615469] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4cae3554-4e80-4ad5-bd15-0c66959fef4c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.623255] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1407.623255] env[62619]: value = "task-1777251" [ 1407.623255] env[62619]: _type = "Task" [ 1407.623255] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.631050] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777251, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.666799] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706c7182-183a-41cd-9acf-cf09c939e1c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.678635] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0a2a69-088a-489a-b952-9f99a8320552 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.712800] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Releasing lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.716974] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e7a6e7-54f0-4cf5-8cf2-54ec83b60c5c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.725594] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adcf3a27-376b-4e4a-8377-4f059a8a2840 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.741514] env[62619]: DEBUG nova.compute.provider_tree [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1408.136882] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777251, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.155459] env[62619]: DEBUG nova.compute.manager [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1408.183157] env[62619]: DEBUG nova.virt.hardware [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1408.183157] env[62619]: DEBUG nova.virt.hardware [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1408.183157] env[62619]: DEBUG nova.virt.hardware [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1408.183712] env[62619]: DEBUG nova.virt.hardware [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1408.184119] env[62619]: DEBUG nova.virt.hardware [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1408.184471] env[62619]: DEBUG nova.virt.hardware [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1408.185016] env[62619]: DEBUG nova.virt.hardware [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1408.185530] env[62619]: DEBUG nova.virt.hardware [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 
tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1408.186105] env[62619]: DEBUG nova.virt.hardware [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1408.186480] env[62619]: DEBUG nova.virt.hardware [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1408.186821] env[62619]: DEBUG nova.virt.hardware [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1408.188212] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05cfc9d-96c8-481f-a160-5c58de474799 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.200242] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ffd1cf-d876-47bd-b8a5-fa6c0eff56d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.247190] env[62619]: DEBUG nova.scheduler.client.report [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1408.641109] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777251, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.70059} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.641109] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e81c03f7-9c0e-46bd-9641-aced82038eca/e81c03f7-9c0e-46bd-9641-aced82038eca.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1408.641109] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1408.641109] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2397b419-1e85-4faf-9f74-7b640068665c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.647315] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1408.647315] env[62619]: value = "task-1777252" [ 1408.647315] env[62619]: _type = "Task" [ 1408.647315] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.657681] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777252, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.753177] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1408.754660] env[62619]: DEBUG nova.compute.manager [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1408.756665] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 28.293s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.163621] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777252, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.154501} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.164405] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1409.165209] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75136fb5-9b9d-4e31-bfe7-5dc81c7d240b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.190037] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] e81c03f7-9c0e-46bd-9641-aced82038eca/e81c03f7-9c0e-46bd-9641-aced82038eca.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1409.190945] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d63cd0f1-12f2-4413-ba61-5ec2bce46305 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.213111] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1409.213111] env[62619]: value = "task-1777253" [ 1409.213111] env[62619]: _type = "Task" [ 1409.213111] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.220693] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777253, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.255592] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "cef20063-96f0-46cc-9f7d-4436b60216c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.255904] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "cef20063-96f0-46cc-9f7d-4436b60216c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.261723] env[62619]: DEBUG nova.compute.utils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1409.265716] env[62619]: INFO nova.compute.claims [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1409.269819] env[62619]: DEBUG nova.compute.manager [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1409.269916] env[62619]: DEBUG nova.network.neutron [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1409.275018] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1409.275018] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0c8b227-3ddf-4d7a-bb08-b1a94e06b9f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.282780] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1409.282780] env[62619]: value = "task-1777254" [ 1409.282780] env[62619]: _type = "Task" [ 1409.282780] env[62619]: } to complete.
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.293935] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777254, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.403947] env[62619]: DEBUG nova.policy [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c91b100cc8f94b93af086dafebe29092', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c24c9d49d8d4104a0868f126eb3a26e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1409.726859] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777253, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.772819] env[62619]: DEBUG nova.compute.manager [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1409.778901] env[62619]: INFO nova.compute.resource_tracker [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating resource usage from migration 610837f7-e212-4892-aec3-980eeb410f02 [ 1409.806034] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777254, 'name': PowerOffVM_Task, 'duration_secs': 0.199917} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.806034] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1409.806793] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815ae91a-ac19-400f-87b3-54acbc2c2654 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.832911] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5408cc-3a6f-473c-a069-29d57e341492 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.868400] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1409.869462] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c5cde800-9bbb-4637-9c08-7036c0f4862d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.875259] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1409.875259] env[62619]: value = "task-1777255" [ 1409.875259] env[62619]: _type = "Task" [ 1409.875259] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.887197] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1409.887478] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1409.887725] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1409.887868] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.888049] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1409.888453] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ecf7e8d3-e61f-4d9e-ac77-dc6a6a7f0c0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.896682] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1409.896845] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1409.897577] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dac12f73-eeac-4943-9c30-3ae615e7bbcd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.902688] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1409.902688] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529602c2-6c2c-6ec1-aab0-07681249c47e" [ 1409.902688] env[62619]: _type = "Task" [ 1409.902688] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.913793] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529602c2-6c2c-6ec1-aab0-07681249c47e, 'name': SearchDatastore_Task, 'duration_secs': 0.00826} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.914727] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1ff81ac-1ead-47f9-95b5-f4cdb16a1648 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.921828] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1409.921828] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529d92ba-9066-5cbe-138d-1e3b65e75fc5" [ 1409.921828] env[62619]: _type = "Task" [ 1409.921828] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.929354] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529d92ba-9066-5cbe-138d-1e3b65e75fc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.125537] env[62619]: DEBUG nova.network.neutron [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Successfully updated port: 902ccd9e-7453-4859-8766-73a3449e946d {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1410.223763] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777253, 'name': ReconfigVM_Task, 'duration_secs': 0.519267} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.227664] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Reconfigured VM instance instance-00000014 to attach disk [datastore1] e81c03f7-9c0e-46bd-9641-aced82038eca/e81c03f7-9c0e-46bd-9641-aced82038eca.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1410.229660] env[62619]: DEBUG nova.network.neutron [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Successfully created port: 28e9da04-af12-4a21-b4ee-408c492669ef {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1410.232758] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0232f166-6d33-451e-a5b0-c05d41905451 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.246284] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1410.246284] env[62619]: value = "task-1777256" [ 1410.246284] env[62619]: _type = "Task" [ 1410.246284] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.261758] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777256, 'name': Rename_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.330415] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2827293c-c9e4-4c28-807a-7a09dc1a4a20 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.339618] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1031347a-122a-43b0-8e9d-9631c4a65003 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.381458] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-636ab78f-e488-497e-936b-ee8ba38dbce1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.395467] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19bafa1-5f92-47f5-a21d-4b6f49bd6d0b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.411640] env[62619]: DEBUG nova.compute.provider_tree [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1410.432688] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529d92ba-9066-5cbe-138d-1e3b65e75fc5, 'name': SearchDatastore_Task, 'duration_secs': 0.008408} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.433053] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1410.433359] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] d16bebd1-a144-4d73-8eb6-8ab12a08fe69/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk. 
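The disk_copy record above copies the cached image vmdk into the instance directory under a "<image-id>-rescue.vmdk" name. The snippet below only reconstructs how such "[datastore] path" strings are assembled, using the IDs that appear in the log; ds_path is an illustrative helper, not Nova's DatastorePath class.

    def ds_path(datastore, *parts):
        return "[%s] %s" % (datastore, "/".join(parts))

    image_id = "27a858d5-7985-4b17-8b01-50adcd8f566c"
    instance_uuid = "d16bebd1-a144-4d73-8eb6-8ab12a08fe69"

    source = ds_path("datastore1", "devstack-image-cache_base", image_id, image_id + ".vmdk")
    rescue = ds_path("datastore1", instance_uuid, image_id + "-rescue.vmdk")
    print("copy %s -> %s" % (source, rescue))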
{{(pid=62619) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1410.435386] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e915e15b-00b1-4916-b88e-e55ded965d93 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.441446] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1410.441446] env[62619]: value = "task-1777257" [ 1410.441446] env[62619]: _type = "Task" [ 1410.441446] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.450990] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777257, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.635614] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Acquiring lock "refresh_cache-3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.635614] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Acquired lock "refresh_cache-3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.638299] env[62619]: DEBUG nova.network.neutron [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1410.751853] env[62619]: DEBUG nova.compute.manager [req-3429a924-42be-4751-bbf5-97e7df151fa6 req-0d6d40ca-3c9b-4259-b56b-bf2a7520b4fb service nova] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Received event network-vif-plugged-902ccd9e-7453-4859-8766-73a3449e946d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1410.752249] env[62619]: DEBUG oslo_concurrency.lockutils [req-3429a924-42be-4751-bbf5-97e7df151fa6 req-0d6d40ca-3c9b-4259-b56b-bf2a7520b4fb service nova] Acquiring lock "3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.752600] env[62619]: DEBUG oslo_concurrency.lockutils [req-3429a924-42be-4751-bbf5-97e7df151fa6 req-0d6d40ca-3c9b-4259-b56b-bf2a7520b4fb service nova] Lock "3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.752771] env[62619]: DEBUG oslo_concurrency.lockutils [req-3429a924-42be-4751-bbf5-97e7df151fa6 req-0d6d40ca-3c9b-4259-b56b-bf2a7520b4fb service nova] Lock "3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.752937] env[62619]: DEBUG nova.compute.manager [req-3429a924-42be-4751-bbf5-97e7df151fa6 req-0d6d40ca-3c9b-4259-b56b-bf2a7520b4fb service nova] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] No waiting events found dispatching network-vif-plugged-902ccd9e-7453-4859-8766-73a3449e946d {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1410.753112] env[62619]: WARNING nova.compute.manager [req-3429a924-42be-4751-bbf5-97e7df151fa6 req-0d6d40ca-3c9b-4259-b56b-bf2a7520b4fb service nova] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Received unexpected event network-vif-plugged-902ccd9e-7453-4859-8766-73a3449e946d for instance with vm_state building and task_state spawning. [ 1410.759856] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777256, 'name': Rename_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.793687] env[62619]: DEBUG nova.compute.manager [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Start spawning the instance on the hypervisor. 
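Just above, a network-vif-plugged event arrives from Neutron while the instance is still building, no registered waiter is found, and the manager logs it as unexpected. The sketch below is a simplified, hypothetical stand-in for that pop-a-waiter-or-warn behaviour; it is not the ComputeManager implementation.

    import threading

    _waiters = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(instance_uuid, event_name):
        waiter = threading.Event()
        _waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def external_instance_event(instance_uuid, event_name):
        waiter = _waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print("WARNING: received unexpected event %s for instance %s"
                  % (event_name, instance_uuid))
        else:
            waiter.set()

    uuid = "3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41"
    external_instance_event(uuid, "network-vif-plugged")   # no waiter yet -> warning
    waiter = prepare_for_event(uuid, "network-vif-plugged")
    external_instance_event(uuid, "network-vif-plugged")   # waiter is signalled
    print("waiter signalled:", waiter.is_set())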
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1410.832269] env[62619]: DEBUG nova.virt.hardware [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1410.832507] env[62619]: DEBUG nova.virt.hardware [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1410.833248] env[62619]: DEBUG nova.virt.hardware [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1410.833248] env[62619]: DEBUG nova.virt.hardware [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1410.833248] env[62619]: DEBUG nova.virt.hardware [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1410.833248] env[62619]: DEBUG nova.virt.hardware [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1410.833426] env[62619]: DEBUG nova.virt.hardware [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1410.833562] env[62619]: DEBUG nova.virt.hardware [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1410.833742] env[62619]: DEBUG 
nova.virt.hardware [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1410.833902] env[62619]: DEBUG nova.virt.hardware [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1410.834127] env[62619]: DEBUG nova.virt.hardware [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1410.835747] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf6e93c-847d-4baa-ab43-f933c03d84ad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.853944] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d46362-59f1-4acc-8820-dada16587fa6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.914898] env[62619]: DEBUG nova.scheduler.client.report [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1410.953936] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777257, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.184990] env[62619]: DEBUG nova.network.neutron [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1411.260107] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777256, 'name': Rename_Task} progress is 99%. 
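The hardware.py records above walk from the flavor and image limits (all unset, so 65536 each) to "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" for a 1-vCPU flavor. The compact sketch below enumerates sockets/cores/threads triples in the same spirit; it mirrors the idea, not Nova's exact _get_possible_cpu_topologies code.

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        found = []
        for sockets, cores, threads in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))   # [(1, 1, 1)], as in the log
    print(possible_topologies(4))   # several candidates for a 4-vCPU flavor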
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.364445] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "060427a2-e724-4c51-879e-675154ae5df2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.364673] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "060427a2-e724-4c51-879e-675154ae5df2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.424872] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.668s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.425712] env[62619]: INFO nova.compute.manager [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Migrating [ 1411.426476] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1411.426779] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.431207] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.682s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.433466] env[62619]: DEBUG nova.objects.instance [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lazy-loading 'resources' on Instance uuid 08c1fef9-40fc-4420-91de-fe911dea70f7 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1411.458073] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777257, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569107} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.459050] env[62619]: DEBUG nova.network.neutron [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Updating instance_info_cache with network_info: [{"id": "902ccd9e-7453-4859-8766-73a3449e946d", "address": "fa:16:3e:12:1e:a5", "network": {"id": "0ca73c17-f43c-4e56-bf0c-0caa97365dfb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1757370232-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "064c40929d5b478eb56dc54fcb46bc21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap902ccd9e-74", "ovs_interfaceid": "902ccd9e-7453-4859-8766-73a3449e946d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.461751] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] d16bebd1-a144-4d73-8eb6-8ab12a08fe69/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk. 
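The cache-update record above carries the full network_info blob for port 902ccd9e-7453-4859-8766-73a3449e946d. As a reading aid, the short sketch below pulls out the same fields a person would scan for (port id, MAC, fixed IPs, tap device) from a trimmed copy of that structure; summarize is an illustrative helper name.

    def summarize(network_info):
        for vif in network_info:
            ips = [ip["address"]
                   for subnet in vif["network"]["subnets"]
                   for ip in subnet["ips"]]
            yield vif["id"], vif["address"], ips, vif["devname"]

    network_info = [{
        "id": "902ccd9e-7453-4859-8766-73a3449e946d",
        "address": "fa:16:3e:12:1e:a5",
        "devname": "tap902ccd9e-74",
        "network": {"subnets": [{"cidr": "192.168.128.0/28",
                                 "ips": [{"address": "192.168.128.10"}]}]},
    }]

    for port_id, mac, ips, devname in summarize(network_info):
        print(port_id, mac, ips, devname)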
[ 1411.463666] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f202f25-be0d-4f6b-ad00-0c2864fbcd3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.497971] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] d16bebd1-a144-4d73-8eb6-8ab12a08fe69/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1411.498475] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e94f2c00-586f-4e67-9673-62388875a249 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.520579] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1411.520579] env[62619]: value = "task-1777258" [ 1411.520579] env[62619]: _type = "Task" [ 1411.520579] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.529921] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777258, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.755053] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777256, 'name': Rename_Task, 'duration_secs': 1.118119} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.756099] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1411.756360] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84ec1a37-deb2-42a5-8837-b86ae1741817 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.764371] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1411.764371] env[62619]: value = "task-1777259" [ 1411.764371] env[62619]: _type = "Task" [ 1411.764371] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.773330] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777259, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.937632] env[62619]: INFO nova.compute.rpcapi [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 1411.937632] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1411.962534] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Releasing lock "refresh_cache-3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1411.962931] env[62619]: DEBUG nova.compute.manager [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Instance network_info: |[{"id": "902ccd9e-7453-4859-8766-73a3449e946d", "address": "fa:16:3e:12:1e:a5", "network": {"id": "0ca73c17-f43c-4e56-bf0c-0caa97365dfb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1757370232-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "064c40929d5b478eb56dc54fcb46bc21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap902ccd9e-74", "ovs_interfaceid": "902ccd9e-7453-4859-8766-73a3449e946d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1411.965482] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:1e:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56834f67-27a8-43dc-bbc6-a74aaa08959b', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '902ccd9e-7453-4859-8766-73a3449e946d', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1411.979209] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Creating folder: Project (064c40929d5b478eb56dc54fcb46bc21). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1411.981057] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a04579ff-f687-4665-9614-02cc4c8b7c05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.991846] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Created folder: Project (064c40929d5b478eb56dc54fcb46bc21) in parent group-v368875. [ 1411.992044] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Creating folder: Instances. Parent ref: group-v368932. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1411.992282] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-502a8d3a-2689-4ca1-8c00-d95bc0beeb6f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.001291] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Created folder: Instances in parent group-v368932. [ 1412.003143] env[62619]: DEBUG oslo.service.loopingcall [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1412.003143] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1412.003143] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2433b980-70d7-4f7a-949c-f301916dba97 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.028253] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1412.028253] env[62619]: value = "task-1777262" [ 1412.028253] env[62619]: _type = "Task" [ 1412.028253] env[62619]: } to complete. 
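Above, the driver creates a per-project folder under the root group (group-v368875) and an Instances folder under it before CreateVM_Task runs, and both calls succeed even when the folders already exist. The sketch below is an in-memory, hypothetical stand-in for that idempotent two-level folder creation; the synthetic morefs are invented for illustration.

    _folders = {}   # folder path -> synthetic moref

    def create_folder_if_missing(parent, name):
        path = "%s/%s" % (parent, name)
        if path not in _folders:
            _folders[path] = "group-v%d" % (368875 + len(_folders) + 1)
            print("Created folder: %s in parent %s." % (name, parent))
        return path

    project = create_folder_if_missing(
        "group-v368875", "Project (064c40929d5b478eb56dc54fcb46bc21)")
    instances = create_folder_if_missing(project, "Instances")
    print(instances, "->", _folders[instances])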
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.034896] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777258, 'name': ReconfigVM_Task, 'duration_secs': 0.333417} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.037677] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Reconfigured VM instance instance-00000013 to attach disk [datastore1] d16bebd1-a144-4d73-8eb6-8ab12a08fe69/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1412.039178] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61c09e4-f2ae-4b14-a0bc-9b44379cf0d0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.044705] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777262, 'name': CreateVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.071921] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a06617b-fadc-4c29-bbb3-50d8220f9b58 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.088242] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1412.088242] env[62619]: value = "task-1777263" [ 1412.088242] env[62619]: _type = "Task" [ 1412.088242] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.101718] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777263, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.276491] env[62619]: DEBUG oslo_vmware.api [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777259, 'name': PowerOnVM_Task, 'duration_secs': 0.435954} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.276766] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1412.276934] env[62619]: INFO nova.compute.manager [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Took 6.86 seconds to spawn the instance on the hypervisor. [ 1412.277199] env[62619]: DEBUG nova.compute.manager [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1412.277971] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc8d4d5-e8b8-4f50-87ab-d9183ffc048e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.353740] env[62619]: DEBUG nova.network.neutron [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Successfully updated port: 28e9da04-af12-4a21-b4ee-408c492669ef {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1412.462424] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "refresh_cache-aa4906f1-e801-4df0-819e-8c5fb5930fb5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.462616] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired lock "refresh_cache-aa4906f1-e801-4df0-819e-8c5fb5930fb5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.462878] env[62619]: DEBUG nova.network.neutron [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1412.540886] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777262, 'name': CreateVM_Task, 'duration_secs': 0.415169} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.541072] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1412.542157] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.542324] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.542859] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1412.543119] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3684530c-04b8-48d3-8bd9-2a3e8ebddb1d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.550085] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Waiting for the task: (returnval){ [ 1412.550085] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529e404d-f39e-bd57-a3cb-1f513ff37dc0" [ 1412.550085] env[62619]: _type = "Task" [ 1412.550085] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.558165] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529e404d-f39e-bd57-a3cb-1f513ff37dc0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.577252] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5398207-44b8-495e-aa82-fada3119b70a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.584537] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1799e278-f64f-4fe9-b3a7-8fb780f6f234 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.597466] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777263, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.621565] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1453788-2f2b-43c8-acc7-e8bdb58c49a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.629456] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e71df8-bcab-435c-962a-68ad896ab0f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.642870] env[62619]: DEBUG nova.compute.provider_tree [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1412.798127] env[62619]: INFO nova.compute.manager [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Took 35.75 seconds to build instance. 
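The ProviderTree and report-client records in this stretch repeatedly note that inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b, using the inventory dict shown earlier. A minimal sketch of that skip-if-equal check, simplified to a plain dict comparison:

    def inventory_changed(existing, desired):
        return existing != desired

    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1, "max_unit": 65530,
                      "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 164,
                    "step_size": 1, "allocation_ratio": 1.0},
    }

    if not inventory_changed(inventory, dict(inventory)):
        print("Inventory has not changed for provider "
              "e814b747-ed75-487b-a97d-acf66bc6db0b")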
[ 1412.861314] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "refresh_cache-006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.861468] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "refresh_cache-006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.862812] env[62619]: DEBUG nova.network.neutron [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1413.063281] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529e404d-f39e-bd57-a3cb-1f513ff37dc0, 'name': SearchDatastore_Task, 'duration_secs': 0.012184} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.063281] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.063281] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1413.063281] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.063735] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.063735] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 
tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1413.063735] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1b0e161-d768-4165-8bb2-993f5e801360 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.073296] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1413.073296] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1413.073296] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f98b847c-8a2b-43db-a9e4-6e0306b95b65 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.078399] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Waiting for the task: (returnval){ [ 1413.078399] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c4b2d5-da94-7cd6-ff91-fe46ca133f92" [ 1413.078399] env[62619]: _type = "Task" [ 1413.078399] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.086635] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c4b2d5-da94-7cd6-ff91-fe46ca133f92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.098643] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777263, 'name': ReconfigVM_Task, 'duration_secs': 0.883058} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.098895] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1413.099218] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c0a1167-2cde-4d12-a3b1-24b66913a9a0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.105286] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1413.105286] env[62619]: value = "task-1777264" [ 1413.105286] env[62619]: _type = "Task" [ 1413.105286] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.117102] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777264, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.146624] env[62619]: DEBUG nova.scheduler.client.report [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1413.299922] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec62572-a810-436e-8d6c-a5ea2b9b1f49 tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Lock "e81c03f7-9c0e-46bd-9641-aced82038eca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.508s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.312295] env[62619]: DEBUG nova.network.neutron [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating instance_info_cache with network_info: [{"id": "5530a947-e30a-4156-be0f-8e8dd90d2aef", "address": "fa:16:3e:96:ec:35", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
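The lockutils records around here report both how long a caller waited for a lock and how long it was held (for example "held 54.508s" just above). The context manager below is an illustrative wrapper showing where such figures could come from by timing acquisition and the critical section separately; it is not the oslo.concurrency implementation.

    import contextlib
    import threading
    import time

    _lock = threading.Lock()

    @contextlib.contextmanager
    def timed_lock(name, lock=_lock):
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
        held_from = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print('Lock "%s" "released" :: held %.3fs'
                  % (name, time.monotonic() - held_from))

    with timed_lock("compute_resources"):
        time.sleep(0.05)   # stands in for the resource tracker's critical section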
{"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5530a947-e3", "ovs_interfaceid": "5530a947-e30a-4156-be0f-8e8dd90d2aef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.409136] env[62619]: DEBUG nova.network.neutron [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1413.532567] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.532829] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.533941] env[62619]: DEBUG nova.compute.manager [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Received event network-changed-902ccd9e-7453-4859-8766-73a3449e946d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1413.534147] env[62619]: DEBUG nova.compute.manager [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Refreshing instance network info cache due to event network-changed-902ccd9e-7453-4859-8766-73a3449e946d. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1413.534340] env[62619]: DEBUG oslo_concurrency.lockutils [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] Acquiring lock "refresh_cache-3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.534468] env[62619]: DEBUG oslo_concurrency.lockutils [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] Acquired lock "refresh_cache-3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.534618] env[62619]: DEBUG nova.network.neutron [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Refreshing network info cache for port 902ccd9e-7453-4859-8766-73a3449e946d {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1413.565480] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "e32cb991-a018-4b55-8cdf-378e212c8434" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.565711] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "e32cb991-a018-4b55-8cdf-378e212c8434" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.588902] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c4b2d5-da94-7cd6-ff91-fe46ca133f92, 'name': SearchDatastore_Task, 'duration_secs': 0.008277} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.592419] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a672e151-bb8a-4ca4-b78c-17e784e54871 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.597911] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Waiting for the task: (returnval){ [ 1413.597911] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524ddc23-d219-72a9-a9db-8bf4b91342b8" [ 1413.597911] env[62619]: _type = "Task" [ 1413.597911] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.607082] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524ddc23-d219-72a9-a9db-8bf4b91342b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.616142] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777264, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.652680] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.221s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.657926] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.583s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.659472] env[62619]: INFO nova.compute.claims [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1413.678323] env[62619]: DEBUG nova.network.neutron [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Updating instance_info_cache with network_info: [{"id": "28e9da04-af12-4a21-b4ee-408c492669ef", "address": "fa:16:3e:00:1b:cf", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28e9da04-af", "ovs_interfaceid": "28e9da04-af12-4a21-b4ee-408c492669ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.687424] env[62619]: INFO nova.scheduler.client.report [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleted allocations for instance 08c1fef9-40fc-4420-91de-fe911dea70f7 [ 1413.805316] env[62619]: DEBUG nova.compute.manager [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1413.815117] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Releasing lock "refresh_cache-aa4906f1-e801-4df0-819e-8c5fb5930fb5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.111358] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524ddc23-d219-72a9-a9db-8bf4b91342b8, 'name': SearchDatastore_Task, 'duration_secs': 0.009344} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.115385] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.115780] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41/3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1414.116302] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-475e3339-9552-4604-b445-018002c0db06 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.126220] env[62619]: DEBUG oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777264, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.128377] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Waiting for the task: (returnval){ [ 1414.128377] env[62619]: value = "task-1777265" [ 1414.128377] env[62619]: _type = "Task" [ 1414.128377] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.138145] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777265, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.180021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "refresh_cache-006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.180359] env[62619]: DEBUG nova.compute.manager [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Instance network_info: |[{"id": "28e9da04-af12-4a21-b4ee-408c492669ef", "address": "fa:16:3e:00:1b:cf", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28e9da04-af", "ovs_interfaceid": "28e9da04-af12-4a21-b4ee-408c492669ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1414.180755] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:1b:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8ee8640-3787-4c27-9581-962ddb2be7e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28e9da04-af12-4a21-b4ee-408c492669ef', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1414.189740] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Creating folder: Project (7c24c9d49d8d4104a0868f126eb3a26e). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1414.190440] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df476029-2add-4c38-abe3-bce135001448 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.195570] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb13e009-9ec7-4465-84e9-806127dfb70b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "08c1fef9-40fc-4420-91de-fe911dea70f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.599s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.204765] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Created folder: Project (7c24c9d49d8d4104a0868f126eb3a26e) in parent group-v368875. [ 1414.205019] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Creating folder: Instances. Parent ref: group-v368935. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1414.206023] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4694c8ae-1fce-45fa-acc2-90c247270c8f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.215942] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Created folder: Instances in parent group-v368935. [ 1414.216207] env[62619]: DEBUG oslo.service.loopingcall [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1414.216640] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1414.216854] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0b90d41-4b3d-4106-87e9-71fe0dcff1f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.239113] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1414.239113] env[62619]: value = "task-1777268" [ 1414.239113] env[62619]: _type = "Task" [ 1414.239113] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.247533] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777268, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.330248] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.445702] env[62619]: DEBUG nova.network.neutron [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Updated VIF entry in instance network info cache for port 902ccd9e-7453-4859-8766-73a3449e946d. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1414.446122] env[62619]: DEBUG nova.network.neutron [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Updating instance_info_cache with network_info: [{"id": "902ccd9e-7453-4859-8766-73a3449e946d", "address": "fa:16:3e:12:1e:a5", "network": {"id": "0ca73c17-f43c-4e56-bf0c-0caa97365dfb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1757370232-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "064c40929d5b478eb56dc54fcb46bc21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap902ccd9e-74", "ovs_interfaceid": "902ccd9e-7453-4859-8766-73a3449e946d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1414.451269] env[62619]: INFO nova.compute.manager [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Rebuilding instance [ 1414.497695] env[62619]: DEBUG nova.compute.manager [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1414.499926] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3f3ee1-354f-4122-8f5c-674bda8a2b8e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.622164] env[62619]: DEBUG 
oslo_vmware.api [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777264, 'name': PowerOnVM_Task, 'duration_secs': 1.186619} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.622609] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1414.625747] env[62619]: DEBUG nova.compute.manager [None req-bafb698a-e167-4814-9a6b-b07ccfe576c8 tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1414.626719] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8334bad4-b7a5-4967-8849-d41e1b506c79 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.639577] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777265, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.752067] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777268, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.952119] env[62619]: DEBUG oslo_concurrency.lockutils [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] Releasing lock "refresh_cache-3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.952822] env[62619]: DEBUG nova.compute.manager [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Received event network-vif-plugged-28e9da04-af12-4a21-b4ee-408c492669ef {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1414.952822] env[62619]: DEBUG oslo_concurrency.lockutils [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] Acquiring lock "006c9f0b-4b53-4740-9f67-ec9b19b8bcb2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.952822] env[62619]: DEBUG oslo_concurrency.lockutils [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] Lock "006c9f0b-4b53-4740-9f67-ec9b19b8bcb2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.952969] env[62619]: DEBUG oslo_concurrency.lockutils [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] Lock "006c9f0b-4b53-4740-9f67-ec9b19b8bcb2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.953150] env[62619]: DEBUG nova.compute.manager [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] No waiting events found dispatching network-vif-plugged-28e9da04-af12-4a21-b4ee-408c492669ef {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1414.953338] env[62619]: WARNING nova.compute.manager [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Received unexpected event network-vif-plugged-28e9da04-af12-4a21-b4ee-408c492669ef for instance with vm_state building and task_state spawning. [ 1414.953517] env[62619]: DEBUG nova.compute.manager [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Received event network-changed-28e9da04-af12-4a21-b4ee-408c492669ef {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1414.953685] env[62619]: DEBUG nova.compute.manager [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Refreshing instance network info cache due to event network-changed-28e9da04-af12-4a21-b4ee-408c492669ef. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1414.953867] env[62619]: DEBUG oslo_concurrency.lockutils [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] Acquiring lock "refresh_cache-006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.954009] env[62619]: DEBUG oslo_concurrency.lockutils [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] Acquired lock "refresh_cache-006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.954175] env[62619]: DEBUG nova.network.neutron [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Refreshing network info cache for port 28e9da04-af12-4a21-b4ee-408c492669ef {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1415.132195] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53d6cb7-e8d1-4134-9e41-b85b0b139219 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.148157] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea97acb3-56e2-47a4-a58d-a11a4c53af3f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.153802] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777265, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551829} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.155148] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41/3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1415.155624] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1415.156712] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0f6b3711-ac4b-4eee-8b27-0eb10df0b043 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.187751] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25786b5a-fa5b-493e-b25f-245e47919c2f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.191024] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Waiting for the task: (returnval){ [ 1415.191024] env[62619]: value = "task-1777269" [ 1415.191024] env[62619]: _type = "Task" [ 1415.191024] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.198801] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee18e274-c96a-4ad0-b15e-4021a3506d42 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.206546] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777269, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.216458] env[62619]: DEBUG nova.compute.provider_tree [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1415.250913] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777268, 'name': CreateVM_Task, 'duration_secs': 0.621154} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.251177] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1415.251917] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.252125] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.252475] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1415.252753] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-027558a5-786f-4766-8d89-07175c66db18 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.257669] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1415.257669] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52da985a-1053-6bdf-826e-18bc34f3be90" [ 1415.257669] env[62619]: _type = "Task" [ 1415.257669] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.266245] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52da985a-1053-6bdf-826e-18bc34f3be90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.330967] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e569d5-b35c-445a-b0c5-bdcfc0612fff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.349304] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating instance 'aa4906f1-e801-4df0-819e-8c5fb5930fb5' progress to 0 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1415.519267] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1415.519568] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0047ff76-3530-4815-9ae8-8a0efa916de9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.527578] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1415.527578] env[62619]: value = "task-1777270" [ 1415.527578] env[62619]: _type = "Task" [ 1415.527578] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.536108] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777270, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.701305] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777269, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149118} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.701621] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1415.702539] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317ab61e-580e-4c5c-96fe-d96e8e2d10be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.724757] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41/3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1415.730280] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ed1166d-7774-4714-b4ee-78d2e26270a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.752023] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Waiting for the task: (returnval){ [ 1415.752023] env[62619]: value = "task-1777271" [ 1415.752023] env[62619]: _type = "Task" [ 1415.752023] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.757612] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777271, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.763641] env[62619]: ERROR nova.scheduler.client.report [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [req-d9f8102b-0aa4-49de-9ef1-4c7a5c496a7e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d9f8102b-0aa4-49de-9ef1-4c7a5c496a7e"}]} [ 1415.769537] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52da985a-1053-6bdf-826e-18bc34f3be90, 'name': SearchDatastore_Task, 'duration_secs': 0.03225} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.771951] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.772194] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1415.772420] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.772563] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.772733] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1415.773067] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c85c659-0998-43ba-b4d0-3a4b54609fdc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.781222] env[62619]: DEBUG nova.scheduler.client.report [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1415.784099] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a70487bd-c8df-42e7-a557-5929faa05242 
tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1415.784321] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1415.785091] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-040e6595-917f-471b-9271-d522917393b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.790080] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1415.790080] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52cc1569-5eae-1fd1-3623-67ab6b524f74" [ 1415.790080] env[62619]: _type = "Task" [ 1415.790080] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.797666] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52cc1569-5eae-1fd1-3623-67ab6b524f74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.798830] env[62619]: DEBUG nova.scheduler.client.report [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1415.799046] env[62619]: DEBUG nova.compute.provider_tree [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1415.817186] env[62619]: DEBUG nova.scheduler.client.report [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 
tempest-ServerTagsTestJSON-2115815453-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1415.855691] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1415.856105] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f26a5dd3-39bd-49f3-b268-ef182b95982f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.863384] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1415.863384] env[62619]: value = "task-1777272" [ 1415.863384] env[62619]: _type = "Task" [ 1415.863384] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.864269] env[62619]: DEBUG nova.scheduler.client.report [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1415.874811] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777272, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.038691] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777270, 'name': PowerOffVM_Task, 'duration_secs': 0.116494} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.039509] env[62619]: DEBUG nova.network.neutron [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Updated VIF entry in instance network info cache for port 28e9da04-af12-4a21-b4ee-408c492669ef. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1416.039827] env[62619]: DEBUG nova.network.neutron [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Updating instance_info_cache with network_info: [{"id": "28e9da04-af12-4a21-b4ee-408c492669ef", "address": "fa:16:3e:00:1b:cf", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28e9da04-af", "ovs_interfaceid": "28e9da04-af12-4a21-b4ee-408c492669ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.043273] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1416.043507] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1416.044703] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d905bc0-e2a7-4944-ac41-5fa0eb79ffc8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.051425] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1416.051722] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a6d8d513-b344-4c64-97d9-49083a5fa9e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.078897] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} 
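The destroy sequence recorded above (PowerOffVM_Task, UnregisterVM, then DeleteDatastoreFile_Task) is driven by the same task-polling pattern that recurs throughout these entries: wait_for_task repeatedly invokes _poll_task and logs "progress is N%" until vCenter reports the task as completed or failed. The snippet below is a minimal illustrative sketch of such a poll loop, not the actual oslo.vmware implementation; the get_task_info callable and the dict fields it returns are assumptions introduced only for this example.

    # Illustrative sketch only: a generic poll loop in the spirit of the
    # oslo_vmware.api wait_for_task/_poll_task pattern seen in these logs.
    # `get_task_info` and the fields of the dict it returns are assumptions
    # made for this example, not the real oslo.vmware or pyVmomi interfaces.
    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
        """Poll a vCenter-style task until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()  # e.g. {'state': 'running', 'progress': 66}
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # Corresponds to the "progress is N%" lines emitted above.
            print("progress is %s%%" % info.get('progress', 0))
            time.sleep(interval)
        raise TimeoutError("task did not complete within %.0fs" % timeout)
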
[ 1416.079140] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1416.079310] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Deleting the datastore file [datastore1] e81c03f7-9c0e-46bd-9641-aced82038eca {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1416.079563] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4be14c5-ec7d-4c3c-8c32-f2e86effa8b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.088874] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1416.088874] env[62619]: value = "task-1777274" [ 1416.088874] env[62619]: _type = "Task" [ 1416.088874] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.097239] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777274, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.260736] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777271, 'name': ReconfigVM_Task, 'duration_secs': 0.404895} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.261035] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Reconfigured VM instance instance-00000015 to attach disk [datastore1] 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41/3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1416.262383] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f15886d-62c1-473b-91b1-1c2e87c112d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.267419] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Waiting for the task: (returnval){ [ 1416.267419] env[62619]: value = "task-1777275" [ 1416.267419] env[62619]: _type = "Task" [ 1416.267419] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.284558] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777275, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.302812] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52cc1569-5eae-1fd1-3623-67ab6b524f74, 'name': SearchDatastore_Task, 'duration_secs': 0.014841} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.303345] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e50be332-8111-45a7-b920-8423bc3c6d50 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.310048] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1416.310048] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e67800-455b-b65e-b017-ea510c7d55cf" [ 1416.310048] env[62619]: _type = "Task" [ 1416.310048] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.317856] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e67800-455b-b65e-b017-ea510c7d55cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.355457] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d562469e-2547-4724-bee1-00a5aea3bd18 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.362906] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a96aab3-7263-4d05-b199-19b30c250e5a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.399026] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6c31d4-f7ef-4ca5-803e-fbb09fbab68e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.405532] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777272, 'name': PowerOffVM_Task, 'duration_secs': 0.204835} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.406271] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1416.406529] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating instance 'aa4906f1-e801-4df0-819e-8c5fb5930fb5' progress to 17 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1416.415192] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0df681e-f78e-4976-aeb5-aaf6fb0bf275 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.433139] env[62619]: DEBUG nova.compute.provider_tree [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1416.545288] env[62619]: DEBUG oslo_concurrency.lockutils [req-e946fa36-8fb3-4217-923a-2c2c14ecb00c req-d92e7d24-bedf-4de3-a43e-a6071f2c97b3 service nova] Releasing lock "refresh_cache-006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.601105] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777274, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.359929} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.601105] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1416.601105] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1416.601105] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1416.780052] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777275, 'name': Rename_Task, 'duration_secs': 0.156779} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.780368] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1416.780616] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dda4e5da-7f31-457f-8a0c-80523e749cbe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.787448] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Waiting for the task: (returnval){ [ 1416.787448] env[62619]: value = "task-1777276" [ 1416.787448] env[62619]: _type = "Task" [ 1416.787448] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.794905] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777276, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.820474] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e67800-455b-b65e-b017-ea510c7d55cf, 'name': SearchDatastore_Task, 'duration_secs': 0.017022} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.821399] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.821399] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2/006c9f0b-4b53-4740-9f67-ec9b19b8bcb2.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1416.821399] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-707a32e9-20a3-4892-81a9-01d11a782181 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.827814] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1416.827814] env[62619]: value = "task-1777277" [ 1416.827814] env[62619]: _type = "Task" [ 1416.827814] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.830264] env[62619]: DEBUG nova.compute.manager [req-4883c660-5be3-4c41-a8f4-48cacab7df64 req-cd8990e9-5b08-46b1-b035-664b1022b410 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Received event network-changed-3e989184-0116-4b59-b0cb-45f895f69e47 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1416.830369] env[62619]: DEBUG nova.compute.manager [req-4883c660-5be3-4c41-a8f4-48cacab7df64 req-cd8990e9-5b08-46b1-b035-664b1022b410 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Refreshing instance network info cache due to event network-changed-3e989184-0116-4b59-b0cb-45f895f69e47. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1416.830576] env[62619]: DEBUG oslo_concurrency.lockutils [req-4883c660-5be3-4c41-a8f4-48cacab7df64 req-cd8990e9-5b08-46b1-b035-664b1022b410 service nova] Acquiring lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.830722] env[62619]: DEBUG oslo_concurrency.lockutils [req-4883c660-5be3-4c41-a8f4-48cacab7df64 req-cd8990e9-5b08-46b1-b035-664b1022b410 service nova] Acquired lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.830869] env[62619]: DEBUG nova.network.neutron [req-4883c660-5be3-4c41-a8f4-48cacab7df64 req-cd8990e9-5b08-46b1-b035-664b1022b410 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Refreshing network info cache for port 3e989184-0116-4b59-b0cb-45f895f69e47 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1416.844481] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777277, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.916492] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1416.916732] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1416.916894] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1416.917075] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1416.917227] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1416.917374] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1416.917575] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1416.917729] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1416.917891] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1416.918062] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1416.918241] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1416.924829] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73649ef6-c848-4685-90a4-a41c90126a50 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.943141] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1416.943141] env[62619]: value = "task-1777278" [ 1416.943141] env[62619]: _type = "Task" [ 1416.943141] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.952530] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777278, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.977435] env[62619]: DEBUG nova.scheduler.client.report [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 49 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1416.977659] env[62619]: DEBUG nova.compute.provider_tree [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 49 to 50 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1416.977835] env[62619]: DEBUG nova.compute.provider_tree [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1417.302968] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777276, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.342068] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777277, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.454654] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777278, 'name': ReconfigVM_Task, 'duration_secs': 0.189066} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.455010] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating instance 'aa4906f1-e801-4df0-819e-8c5fb5930fb5' progress to 33 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1417.486568] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.830s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.487211] env[62619]: DEBUG nova.compute.manager [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1417.492026] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.565s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.492699] env[62619]: INFO nova.compute.claims [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1417.639857] env[62619]: DEBUG nova.virt.hardware [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1417.640149] env[62619]: DEBUG nova.virt.hardware [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1417.640305] env[62619]: DEBUG nova.virt.hardware [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a 
tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1417.640482] env[62619]: DEBUG nova.virt.hardware [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1417.640622] env[62619]: DEBUG nova.virt.hardware [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1417.640763] env[62619]: DEBUG nova.virt.hardware [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1417.640963] env[62619]: DEBUG nova.virt.hardware [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1417.641661] env[62619]: DEBUG nova.virt.hardware [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1417.641911] env[62619]: DEBUG nova.virt.hardware [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1417.642106] env[62619]: DEBUG nova.virt.hardware [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1417.642375] env[62619]: DEBUG nova.virt.hardware [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1417.643297] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec901511-46c8-4e30-8ffe-6c11d7581dc5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.647898] env[62619]: DEBUG nova.network.neutron [req-4883c660-5be3-4c41-a8f4-48cacab7df64 req-cd8990e9-5b08-46b1-b035-664b1022b410 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Updated VIF entry in instance network info cache for port 3e989184-0116-4b59-b0cb-45f895f69e47. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1417.648238] env[62619]: DEBUG nova.network.neutron [req-4883c660-5be3-4c41-a8f4-48cacab7df64 req-cd8990e9-5b08-46b1-b035-664b1022b410 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Updating instance_info_cache with network_info: [{"id": "3e989184-0116-4b59-b0cb-45f895f69e47", "address": "fa:16:3e:69:5f:b0", "network": {"id": "458b9515-f6cd-49e3-b6f2-594543a98f0d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1747794949-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "22106ed527594810885b6891b382c3ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e989184-01", "ovs_interfaceid": "3e989184-0116-4b59-b0cb-45f895f69e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1417.655117] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4653cac-5ccb-4b33-8247-5b750b01551b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.671685] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1417.678542] env[62619]: DEBUG oslo.service.loopingcall [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1417.678820] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1417.679145] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72b42a90-d1b9-431b-a547-7e1a6767395e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.698852] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1417.698852] env[62619]: value = "task-1777279" [ 1417.698852] env[62619]: _type = "Task" [ 1417.698852] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.708334] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777279, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.799936] env[62619]: DEBUG oslo_vmware.api [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777276, 'name': PowerOnVM_Task, 'duration_secs': 0.783231} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.804680] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1417.808064] env[62619]: INFO nova.compute.manager [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Took 9.65 seconds to spawn the instance on the hypervisor. [ 1417.808335] env[62619]: DEBUG nova.compute.manager [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1417.809207] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6a439f-5e9c-4e22-8c8a-578d90746e56 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.838649] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777277, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.700944} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.838954] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2/006c9f0b-4b53-4740-9f67-ec9b19b8bcb2.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1417.839221] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1417.839524] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e3b4902-0ba4-4c15-bce1-113cad62c3c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.847120] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1417.847120] env[62619]: value = "task-1777280" [ 1417.847120] env[62619]: _type = "Task" [ 1417.847120] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.855207] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777280, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.962145] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1417.962449] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1417.962642] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1417.962917] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1417.963124] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1417.963311] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1417.963546] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1417.963750] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1417.963951] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 1417.964162] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1417.964368] env[62619]: DEBUG nova.virt.hardware [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1417.969881] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Reconfiguring VM instance instance-0000000a to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1417.970241] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cab95ea1-24fa-43d1-957b-e552db0cac09 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.989270] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1417.989270] env[62619]: value = "task-1777281" [ 1417.989270] env[62619]: _type = "Task" [ 1417.989270] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.002274] env[62619]: DEBUG nova.compute.utils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1418.006610] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777281, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.006610] env[62619]: DEBUG nova.compute.manager [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1418.006610] env[62619]: DEBUG nova.network.neutron [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1418.081079] env[62619]: DEBUG nova.policy [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4d22fdab903748ea951f8a5527b05517', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9095d12e90a24ba9ad1f6d6d16283a3e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1418.151610] env[62619]: DEBUG oslo_concurrency.lockutils [req-4883c660-5be3-4c41-a8f4-48cacab7df64 req-cd8990e9-5b08-46b1-b035-664b1022b410 service nova] Releasing lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.213579] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777279, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.327263] env[62619]: INFO nova.compute.manager [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Took 40.59 seconds to build instance. [ 1418.366776] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777280, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.278422} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.369510] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1418.369510] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624dc0c6-62e6-4699-a974-964906d835f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.399307] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2/006c9f0b-4b53-4740-9f67-ec9b19b8bcb2.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1418.400073] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6f7e374-9bb0-428c-ba00-104d44952c25 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.422703] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1418.422703] env[62619]: value = "task-1777282" [ 1418.422703] env[62619]: _type = "Task" [ 1418.422703] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.432833] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777282, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.499887] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777281, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.507826] env[62619]: DEBUG nova.compute.manager [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1418.637222] env[62619]: DEBUG nova.network.neutron [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Successfully created port: 8f4508cc-e861-44f6-82c8-f82bf1da6ef5 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1418.716782] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777279, 'name': CreateVM_Task, 'duration_secs': 0.825793} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.720016] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1418.721238] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.721238] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.721394] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1418.721683] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8c47596-20a6-4177-85ef-1e9f6236aa41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.726552] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1418.726552] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c571c2-893b-0ab8-c601-2150a33dfaaf" [ 1418.726552] env[62619]: _type = "Task" [ 1418.726552] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.735376] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c571c2-893b-0ab8-c601-2150a33dfaaf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.831147] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90aca3fe-02d4-4ffe-bc48-f3e00b709524 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Lock "3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.380s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.933871] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777282, 'name': ReconfigVM_Task, 'duration_secs': 0.429714} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.934293] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2/006c9f0b-4b53-4740-9f67-ec9b19b8bcb2.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1418.934995] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9799521c-989a-4a49-87d6-e1bf22cd0fd2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.944174] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1418.944174] env[62619]: value = "task-1777283" [ 1418.944174] env[62619]: _type = "Task" [ 1418.944174] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.951351] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777283, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.009199] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777281, 'name': ReconfigVM_Task, 'duration_secs': 0.538905} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.009317] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Reconfigured VM instance instance-0000000a to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1419.013978] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a456593-442b-4584-8b84-1803c0c91a75 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.042899] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] aa4906f1-e801-4df0-819e-8c5fb5930fb5/aa4906f1-e801-4df0-819e-8c5fb5930fb5.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1419.045797] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79e23d0c-25d8-4c90-9ce4-bba4d703bfcb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.065964] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1419.065964] env[62619]: value = "task-1777284" [ 1419.065964] env[62619]: _type = "Task" [ 1419.065964] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.077755] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777284, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.124028] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03245022-b60a-4466-b064-6c9a8d95c8ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.133721] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9c0a1b-4925-4619-b196-8041ccef6afc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.166624] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294b5251-5b9c-4e05-9344-65dd3eacfd9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.175326] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931bc025-b262-4e8c-950d-0c4d151533bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.188718] env[62619]: DEBUG nova.compute.provider_tree [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1419.238755] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c571c2-893b-0ab8-c601-2150a33dfaaf, 'name': SearchDatastore_Task, 'duration_secs': 0.01275} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.239085] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.239332] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1419.239622] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.239771] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.239948] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1419.240230] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2176d6f7-51ae-4849-a440-57324d2d14b3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.249171] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1419.249375] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1419.250051] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec277b47-da0e-4c7e-ac1a-a60ef8baca32 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.255750] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1419.255750] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f26e9d-4dce-764c-0ab0-e82a6ed65e24" [ 1419.255750] env[62619]: _type = "Task" [ 1419.255750] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.264067] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f26e9d-4dce-764c-0ab0-e82a6ed65e24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.333772] env[62619]: DEBUG nova.compute.manager [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1419.452659] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777283, 'name': Rename_Task, 'duration_secs': 0.232508} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.452957] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1419.453228] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae3b537f-0094-47a6-87af-2f6083bc03d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.459948] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1419.459948] env[62619]: value = "task-1777285" [ 1419.459948] env[62619]: _type = "Task" [ 1419.459948] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.467322] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777285, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.524832] env[62619]: DEBUG nova.compute.manager [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1419.530600] env[62619]: DEBUG nova.compute.manager [req-138526e2-13d7-43cc-afdf-db21adb1ae07 req-661dda98-ed91-43f0-9a6b-54cb1954af82 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Received event network-changed-3e989184-0116-4b59-b0cb-45f895f69e47 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1419.530600] env[62619]: DEBUG nova.compute.manager [req-138526e2-13d7-43cc-afdf-db21adb1ae07 req-661dda98-ed91-43f0-9a6b-54cb1954af82 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Refreshing instance network info cache due to event network-changed-3e989184-0116-4b59-b0cb-45f895f69e47. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1419.530891] env[62619]: DEBUG oslo_concurrency.lockutils [req-138526e2-13d7-43cc-afdf-db21adb1ae07 req-661dda98-ed91-43f0-9a6b-54cb1954af82 service nova] Acquiring lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.531500] env[62619]: DEBUG oslo_concurrency.lockutils [req-138526e2-13d7-43cc-afdf-db21adb1ae07 req-661dda98-ed91-43f0-9a6b-54cb1954af82 service nova] Acquired lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.531696] env[62619]: DEBUG nova.network.neutron [req-138526e2-13d7-43cc-afdf-db21adb1ae07 req-661dda98-ed91-43f0-9a6b-54cb1954af82 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Refreshing network info cache for port 3e989184-0116-4b59-b0cb-45f895f69e47 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1419.560586] env[62619]: DEBUG nova.virt.hardware [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1419.560863] env[62619]: DEBUG nova.virt.hardware [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Flavor limits 0:0:0 {{(pid=62619) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1419.561028] env[62619]: DEBUG nova.virt.hardware [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1419.561234] env[62619]: DEBUG nova.virt.hardware [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1419.561369] env[62619]: DEBUG nova.virt.hardware [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1419.561511] env[62619]: DEBUG nova.virt.hardware [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1419.561715] env[62619]: DEBUG nova.virt.hardware [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1419.561886] env[62619]: DEBUG nova.virt.hardware [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1419.562063] env[62619]: DEBUG nova.virt.hardware [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1419.562228] env[62619]: DEBUG nova.virt.hardware [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1419.562396] env[62619]: DEBUG nova.virt.hardware [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1419.563625] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156ac0b5-5c03-4b04-a9d9-a272c3111a6b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.575745] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777284, 'name': 
ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.580335] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1acff1d-87b7-408c-8d5d-521b341f4d90 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.692806] env[62619]: DEBUG nova.scheduler.client.report [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1419.767429] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f26e9d-4dce-764c-0ab0-e82a6ed65e24, 'name': SearchDatastore_Task, 'duration_secs': 0.012513} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.767966] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1592e7f-0857-4960-b787-f8723a961214 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.773652] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1419.773652] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52acd299-dd19-7eef-3167-e252ae1e3f01" [ 1419.773652] env[62619]: _type = "Task" [ 1419.773652] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.786698] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52acd299-dd19-7eef-3167-e252ae1e3f01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.866233] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.971793] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777285, 'name': PowerOnVM_Task} progress is 76%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.076777] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777284, 'name': ReconfigVM_Task, 'duration_secs': 0.768541} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.077095] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Reconfigured VM instance instance-0000000a to attach disk [datastore1] aa4906f1-e801-4df0-819e-8c5fb5930fb5/aa4906f1-e801-4df0-819e-8c5fb5930fb5.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1420.077389] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating instance 'aa4906f1-e801-4df0-819e-8c5fb5930fb5' progress to 50 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1420.199609] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.708s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.200281] env[62619]: DEBUG nova.compute.manager [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1420.206148] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.784s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.207640] env[62619]: INFO nova.compute.claims [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1420.288066] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52acd299-dd19-7eef-3167-e252ae1e3f01, 'name': SearchDatastore_Task, 'duration_secs': 0.022564} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.288361] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.288617] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e81c03f7-9c0e-46bd-9641-aced82038eca/e81c03f7-9c0e-46bd-9641-aced82038eca.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1420.288886] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65ee64cb-ea6e-4553-9c16-ffb228467d85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.296185] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1420.296185] env[62619]: value = "task-1777286" [ 1420.296185] env[62619]: _type = "Task" [ 1420.296185] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.306028] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777286, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.471175] env[62619]: DEBUG oslo_vmware.api [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777285, 'name': PowerOnVM_Task, 'duration_secs': 0.859576} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.471480] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1420.471950] env[62619]: INFO nova.compute.manager [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Took 9.68 seconds to spawn the instance on the hypervisor. 
[ 1420.472162] env[62619]: DEBUG nova.compute.manager [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1420.472946] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41020dfd-3995-478b-8b4d-29e33e4f2011 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.587304] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38edaf3-746e-4489-99d0-8b40d01be982 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.608213] env[62619]: DEBUG nova.network.neutron [req-138526e2-13d7-43cc-afdf-db21adb1ae07 req-661dda98-ed91-43f0-9a6b-54cb1954af82 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Updated VIF entry in instance network info cache for port 3e989184-0116-4b59-b0cb-45f895f69e47. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1420.608575] env[62619]: DEBUG nova.network.neutron [req-138526e2-13d7-43cc-afdf-db21adb1ae07 req-661dda98-ed91-43f0-9a6b-54cb1954af82 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Updating instance_info_cache with network_info: [{"id": "3e989184-0116-4b59-b0cb-45f895f69e47", "address": "fa:16:3e:69:5f:b0", "network": {"id": "458b9515-f6cd-49e3-b6f2-594543a98f0d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1747794949-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "22106ed527594810885b6891b382c3ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e989184-01", "ovs_interfaceid": "3e989184-0116-4b59-b0cb-45f895f69e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1420.610281] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260556e6-c399-4772-887c-1a95d7ef3662 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.632757] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating instance 'aa4906f1-e801-4df0-819e-8c5fb5930fb5' progress to 67 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1420.722036] 
env[62619]: DEBUG nova.compute.utils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1420.725095] env[62619]: DEBUG nova.compute.manager [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1420.726256] env[62619]: DEBUG nova.network.neutron [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1420.810161] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777286, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.847482] env[62619]: DEBUG nova.policy [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9323b6669f0245f799d723270536052d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b9c1aa2b9334d409120b93382a7e770', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1421.000285] env[62619]: INFO nova.compute.manager [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Took 42.73 seconds to build instance. [ 1421.083573] env[62619]: DEBUG nova.network.neutron [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Successfully updated port: 8f4508cc-e861-44f6-82c8-f82bf1da6ef5 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1421.114122] env[62619]: DEBUG oslo_concurrency.lockutils [req-138526e2-13d7-43cc-afdf-db21adb1ae07 req-661dda98-ed91-43f0-9a6b-54cb1954af82 service nova] Releasing lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.227850] env[62619]: DEBUG nova.compute.manager [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1421.247333] env[62619]: DEBUG nova.network.neutron [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Port 5530a947-e30a-4156-be0f-8e8dd90d2aef binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1421.312516] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777286, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.791437} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.315312] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e81c03f7-9c0e-46bd-9641-aced82038eca/e81c03f7-9c0e-46bd-9641-aced82038eca.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1421.315766] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1421.316759] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3060efae-fd5d-469c-90d2-e06422e015c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.324620] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1421.324620] env[62619]: value = "task-1777287" [ 1421.324620] env[62619]: _type = "Task" [ 1421.324620] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.337872] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777287, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.476187] env[62619]: DEBUG nova.network.neutron [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Successfully created port: 19b98734-638c-48c1-aa4b-58c310858ab7 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1421.503615] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a70487bd-c8df-42e7-a557-5929faa05242 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.404s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.589041] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Acquiring lock "refresh_cache-4374c102-a6fe-45ef-ad49-a1295f96899a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.589041] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Acquired lock "refresh_cache-4374c102-a6fe-45ef-ad49-a1295f96899a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.589041] env[62619]: DEBUG nova.network.neutron [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1421.793033] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e57f2f-e564-472c-8a83-ec0437be40a0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.802087] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be226b6c-2b69-4191-8b48-f9b9fd2c4b22 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.842814] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8e7336-6433-490c-bb23-7d1dbcf259f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.854415] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777287, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073166} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.855428] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e71239f-521c-48ea-8fcc-4dd4b2b5ad07 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.860017] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1421.860900] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff1d9f7-18b1-4ec8-9912-cba7c8a13130 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.876864] env[62619]: DEBUG nova.compute.provider_tree [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1421.894367] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] e81c03f7-9c0e-46bd-9641-aced82038eca/e81c03f7-9c0e-46bd-9641-aced82038eca.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1421.896670] env[62619]: DEBUG nova.scheduler.client.report [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1421.899273] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6bf9049-4537-4029-9011-aef96cfb7f68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.923189] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1421.923189] env[62619]: value = "task-1777288" [ 1421.923189] env[62619]: _type = "Task" [ 1421.923189] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.933029] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777288, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.007960] env[62619]: DEBUG nova.compute.manager [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1422.139581] env[62619]: DEBUG nova.network.neutron [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1422.243265] env[62619]: DEBUG nova.compute.manager [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1422.288066] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.288492] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.289067] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.296563] env[62619]: DEBUG nova.virt.hardware [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1422.296786] env[62619]: DEBUG nova.virt.hardware [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1422.296943] env[62619]: DEBUG nova.virt.hardware [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1422.297146] env[62619]: DEBUG nova.virt.hardware [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1422.297292] env[62619]: DEBUG nova.virt.hardware [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1422.297456] env[62619]: DEBUG nova.virt.hardware [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1422.297879] env[62619]: DEBUG nova.virt.hardware [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1422.297879] env[62619]: DEBUG nova.virt.hardware [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1422.298064] env[62619]: DEBUG nova.virt.hardware [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1422.298175] env[62619]: DEBUG nova.virt.hardware [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1422.298353] env[62619]: DEBUG 
nova.virt.hardware [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1422.299531] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca82f71-d877-4a12-88dd-3acc2dccce69 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.309973] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee4b0dcb-dbf4-4ce7-be22-da7703dc78db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.416827] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.211s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.417503] env[62619]: DEBUG nova.compute.manager [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1422.423449] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.607s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.425567] env[62619]: INFO nova.compute.claims [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1422.444799] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777288, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.447507] env[62619]: DEBUG nova.network.neutron [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Updating instance_info_cache with network_info: [{"id": "8f4508cc-e861-44f6-82c8-f82bf1da6ef5", "address": "fa:16:3e:67:7f:d5", "network": {"id": "3d039fbc-9217-49ca-9b7a-60e61ebc5fc7", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1785525003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9095d12e90a24ba9ad1f6d6d16283a3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f4508cc-e8", "ovs_interfaceid": "8f4508cc-e861-44f6-82c8-f82bf1da6ef5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1422.454311] env[62619]: DEBUG nova.compute.manager [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Received event network-vif-plugged-8f4508cc-e861-44f6-82c8-f82bf1da6ef5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1422.455067] env[62619]: DEBUG oslo_concurrency.lockutils [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] Acquiring lock "4374c102-a6fe-45ef-ad49-a1295f96899a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.455067] env[62619]: DEBUG oslo_concurrency.lockutils [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] Lock "4374c102-a6fe-45ef-ad49-a1295f96899a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.455067] env[62619]: DEBUG oslo_concurrency.lockutils [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] Lock "4374c102-a6fe-45ef-ad49-a1295f96899a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.457161] env[62619]: DEBUG nova.compute.manager [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] No waiting events found dispatching network-vif-plugged-8f4508cc-e861-44f6-82c8-f82bf1da6ef5 {{(pid=62619) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1422.457161] env[62619]: WARNING nova.compute.manager [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Received unexpected event network-vif-plugged-8f4508cc-e861-44f6-82c8-f82bf1da6ef5 for instance with vm_state building and task_state spawning. [ 1422.457161] env[62619]: DEBUG nova.compute.manager [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Received event network-changed-3e989184-0116-4b59-b0cb-45f895f69e47 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1422.457161] env[62619]: DEBUG nova.compute.manager [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Refreshing instance network info cache due to event network-changed-3e989184-0116-4b59-b0cb-45f895f69e47. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1422.457575] env[62619]: DEBUG oslo_concurrency.lockutils [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] Acquiring lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.457575] env[62619]: DEBUG oslo_concurrency.lockutils [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] Acquired lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.457782] env[62619]: DEBUG nova.network.neutron [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Refreshing network info cache for port 3e989184-0116-4b59-b0cb-45f895f69e47 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1422.530677] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.933188] env[62619]: DEBUG nova.compute.utils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1422.941590] env[62619]: DEBUG nova.compute.manager [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1422.941932] env[62619]: DEBUG nova.network.neutron [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1422.952231] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Releasing lock "refresh_cache-4374c102-a6fe-45ef-ad49-a1295f96899a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1422.952533] env[62619]: DEBUG nova.compute.manager [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Instance network_info: |[{"id": "8f4508cc-e861-44f6-82c8-f82bf1da6ef5", "address": "fa:16:3e:67:7f:d5", "network": {"id": "3d039fbc-9217-49ca-9b7a-60e61ebc5fc7", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1785525003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9095d12e90a24ba9ad1f6d6d16283a3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f4508cc-e8", "ovs_interfaceid": "8f4508cc-e861-44f6-82c8-f82bf1da6ef5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1422.957355] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:7f:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16e15a36-a55b-4c27-b864-f284339009d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f4508cc-e861-44f6-82c8-f82bf1da6ef5', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1422.965576] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Creating folder: Project (9095d12e90a24ba9ad1f6d6d16283a3e). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1422.969856] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b7dcc8a-ac00-4048-b438-9ab944463d9e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.978622] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777288, 'name': ReconfigVM_Task, 'duration_secs': 0.87877} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.979662] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Reconfigured VM instance instance-00000014 to attach disk [datastore1] e81c03f7-9c0e-46bd-9641-aced82038eca/e81c03f7-9c0e-46bd-9641-aced82038eca.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1422.980554] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dbfe8405-fc03-4098-bc4b-757cb4adcb8c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.989106] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1422.989106] env[62619]: value = "task-1777290" [ 1422.989106] env[62619]: _type = "Task" [ 1422.989106] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.990322] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Created folder: Project (9095d12e90a24ba9ad1f6d6d16283a3e) in parent group-v368875. [ 1422.990495] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Creating folder: Instances. Parent ref: group-v368939. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1422.993557] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20048ec3-fad6-47f4-a7d8-8c0a3a0233ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.001277] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777290, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.005167] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Created folder: Instances in parent group-v368939. 
[ 1423.005509] env[62619]: DEBUG oslo.service.loopingcall [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1423.005608] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1423.005804] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08f30c79-f264-49f1-880e-0323fc89110c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.028501] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1423.028501] env[62619]: value = "task-1777292" [ 1423.028501] env[62619]: _type = "Task" [ 1423.028501] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.037567] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777292, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.110619] env[62619]: DEBUG nova.policy [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b0ec7a74ec944dd8b0417df0178ab15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e471e7b32b154c1db2eac990fd11e539', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1423.345653] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "refresh_cache-aa4906f1-e801-4df0-819e-8c5fb5930fb5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1423.345752] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired lock "refresh_cache-aa4906f1-e801-4df0-819e-8c5fb5930fb5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.345893] env[62619]: DEBUG nova.network.neutron [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1423.443195] env[62619]: DEBUG nova.compute.manager [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1423.504117] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777290, 'name': Rename_Task, 'duration_secs': 0.176865} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.506823] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1423.507307] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-333da768-1679-461a-8fb5-f1c4d589758c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.518153] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1423.518153] env[62619]: value = "task-1777293" [ 1423.518153] env[62619]: _type = "Task" [ 1423.518153] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.529730] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777293, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.542062] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777292, 'name': CreateVM_Task, 'duration_secs': 0.41931} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.542062] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1423.542529] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1423.542529] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.542639] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1423.543212] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0182ee3b-4501-47a5-b43a-62afd512ff86 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.549652] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Waiting for the task: (returnval){ [ 1423.549652] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528c113e-90b2-44a1-d13e-a8352b948fd7" [ 1423.549652] env[62619]: _type = "Task" [ 1423.549652] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.563486] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528c113e-90b2-44a1-d13e-a8352b948fd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.740400] env[62619]: DEBUG nova.network.neutron [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Updated VIF entry in instance network info cache for port 3e989184-0116-4b59-b0cb-45f895f69e47. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1423.740732] env[62619]: DEBUG nova.network.neutron [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Updating instance_info_cache with network_info: [{"id": "3e989184-0116-4b59-b0cb-45f895f69e47", "address": "fa:16:3e:69:5f:b0", "network": {"id": "458b9515-f6cd-49e3-b6f2-594543a98f0d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1747794949-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "22106ed527594810885b6891b382c3ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e989184-01", "ovs_interfaceid": "3e989184-0116-4b59-b0cb-45f895f69e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.978657] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa4d7db-15a1-451f-b025-3521bacde72b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.988672] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f85912-409a-4896-bb08-f1c4f8b538a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.035857] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8191b1-ed45-4ee5-b750-13f0f77ac86b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.047574] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777293, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.048798] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69082ba-5b92-4ca6-b150-6eebf2287cbf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.067442] env[62619]: DEBUG nova.compute.provider_tree [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1424.078651] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528c113e-90b2-44a1-d13e-a8352b948fd7, 'name': SearchDatastore_Task, 'duration_secs': 0.019061} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.079471] env[62619]: DEBUG nova.scheduler.client.report [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1424.082757] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.083014] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.083305] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1424.083547] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 
4374c102-a6fe-45ef-ad49-a1295f96899a] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1424.083783] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.083896] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.084080] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1424.084860] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-943cdcc3-00f2-435f-89db-8eb082948f4c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.095972] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1424.096170] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1424.097814] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33640240-22b4-4606-9c01-f711fed04783 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.104424] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Waiting for the task: (returnval){ [ 1424.104424] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f87e5f-8533-4150-5a27-d08f69174d02" [ 1424.104424] env[62619]: _type = "Task" [ 1424.104424] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.113990] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f87e5f-8533-4150-5a27-d08f69174d02, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.131526] env[62619]: DEBUG nova.network.neutron [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Successfully updated port: 19b98734-638c-48c1-aa4b-58c310858ab7 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1424.188374] env[62619]: DEBUG nova.network.neutron [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Successfully created port: a7a7cbea-2a80-4996-a6d0-8a345c83069f {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1424.244485] env[62619]: DEBUG oslo_concurrency.lockutils [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] Releasing lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1424.244782] env[62619]: DEBUG nova.compute.manager [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Received event network-changed-8f4508cc-e861-44f6-82c8-f82bf1da6ef5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1424.245015] env[62619]: DEBUG nova.compute.manager [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Refreshing instance network info cache due to event network-changed-8f4508cc-e861-44f6-82c8-f82bf1da6ef5. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1424.245250] env[62619]: DEBUG oslo_concurrency.lockutils [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] Acquiring lock "refresh_cache-4374c102-a6fe-45ef-ad49-a1295f96899a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.245397] env[62619]: DEBUG oslo_concurrency.lockutils [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] Acquired lock "refresh_cache-4374c102-a6fe-45ef-ad49-a1295f96899a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.245554] env[62619]: DEBUG nova.network.neutron [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Refreshing network info cache for port 8f4508cc-e861-44f6-82c8-f82bf1da6ef5 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1424.452022] env[62619]: DEBUG nova.compute.manager [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1424.484017] env[62619]: DEBUG nova.virt.hardware [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1424.484017] env[62619]: DEBUG nova.virt.hardware [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1424.484017] env[62619]: DEBUG nova.virt.hardware [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1424.484890] env[62619]: DEBUG nova.virt.hardware [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1424.484890] env[62619]: DEBUG nova.virt.hardware [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1424.484890] env[62619]: DEBUG nova.virt.hardware [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1424.484890] env[62619]: DEBUG nova.virt.hardware [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1424.484890] env[62619]: DEBUG nova.virt.hardware [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1424.485063] env[62619]: DEBUG nova.virt.hardware [None 
req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1424.485063] env[62619]: DEBUG nova.virt.hardware [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1424.485063] env[62619]: DEBUG nova.virt.hardware [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1424.485063] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c002651c-5c33-4333-92f5-292e4a9e7546 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.493536] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61fdf9ca-88bd-4057-ab6f-3ba3235c6f81 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.542151] env[62619]: DEBUG oslo_vmware.api [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777293, 'name': PowerOnVM_Task, 'duration_secs': 0.843133} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.542498] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1424.542631] env[62619]: DEBUG nova.compute.manager [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1424.543458] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e89a78a-0d9f-4a0f-a8c4-bb52f2f1b4b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.572915] env[62619]: DEBUG nova.network.neutron [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating instance_info_cache with network_info: [{"id": "5530a947-e30a-4156-be0f-8e8dd90d2aef", "address": "fa:16:3e:96:ec:35", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5530a947-e3", "ovs_interfaceid": "5530a947-e30a-4156-be0f-8e8dd90d2aef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.586839] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.163s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.587081] env[62619]: DEBUG nova.compute.manager [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1424.590631] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.625s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.591621] env[62619]: INFO nova.compute.claims [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1424.618049] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f87e5f-8533-4150-5a27-d08f69174d02, 'name': SearchDatastore_Task, 'duration_secs': 0.018088} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.618902] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe439c4f-fc48-4c31-b539-230428c30413 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.625891] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Waiting for the task: (returnval){ [ 1424.625891] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5202b784-ef21-1e1e-675d-cc20813c7d1b" [ 1424.625891] env[62619]: _type = "Task" [ 1424.625891] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.635821] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Acquiring lock "refresh_cache-91ce0ab3-4fa4-4992-995a-0baeec91d9d0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.635821] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Acquired lock "refresh_cache-91ce0ab3-4fa4-4992-995a-0baeec91d9d0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.636244] env[62619]: DEBUG nova.network.neutron [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1424.637269] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5202b784-ef21-1e1e-675d-cc20813c7d1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.064930] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.075642] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Releasing lock "refresh_cache-aa4906f1-e801-4df0-819e-8c5fb5930fb5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.097459] env[62619]: DEBUG nova.compute.utils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1425.100942] env[62619]: DEBUG nova.compute.manager [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1425.101172] env[62619]: DEBUG nova.network.neutron [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1425.146171] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5202b784-ef21-1e1e-675d-cc20813c7d1b, 'name': SearchDatastore_Task, 'duration_secs': 0.014082} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.150146] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.150146] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4374c102-a6fe-45ef-ad49-a1295f96899a/4374c102-a6fe-45ef-ad49-a1295f96899a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1425.150146] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-04f7028f-9d7b-4fa0-8806-7e9cae3dcafc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.156976] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Waiting for the task: (returnval){ [ 1425.156976] env[62619]: value = "task-1777294" [ 1425.156976] env[62619]: _type = "Task" [ 1425.156976] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.165707] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777294, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.171581] env[62619]: DEBUG nova.network.neutron [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1425.314395] env[62619]: DEBUG nova.network.neutron [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Updating instance_info_cache with network_info: [{"id": "19b98734-638c-48c1-aa4b-58c310858ab7", "address": "fa:16:3e:ad:1a:90", "network": {"id": "788c512c-28df-4f6e-9573-3ba4f90ef1bd", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1238022785-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b9c1aa2b9334d409120b93382a7e770", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53ebf5df-5ecb-4a0c-a163-d88165639de0", "external-id": "nsx-vlan-transportzone-588", "segmentation_id": 588, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19b98734-63", "ovs_interfaceid": "19b98734-638c-48c1-aa4b-58c310858ab7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.381728] env[62619]: DEBUG nova.policy [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e6ea39cb8e7487cacc8e399961e97c4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '579c1e7229664b1d979a87a476d1de65', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1425.602557] env[62619]: DEBUG nova.compute.manager [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1425.621660] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98bd000-8b27-40d2-94d4-f896834dcece {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.660264] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdcac2f7-c9e4-42c5-be36-e951960471cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.680308] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777294, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.687029] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating instance 'aa4906f1-e801-4df0-819e-8c5fb5930fb5' progress to 83 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1425.702730] env[62619]: DEBUG nova.network.neutron [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Updated VIF entry in instance network info cache for port 8f4508cc-e861-44f6-82c8-f82bf1da6ef5. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1425.702730] env[62619]: DEBUG nova.network.neutron [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Updating instance_info_cache with network_info: [{"id": "8f4508cc-e861-44f6-82c8-f82bf1da6ef5", "address": "fa:16:3e:67:7f:d5", "network": {"id": "3d039fbc-9217-49ca-9b7a-60e61ebc5fc7", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1785525003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9095d12e90a24ba9ad1f6d6d16283a3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f4508cc-e8", "ovs_interfaceid": "8f4508cc-e861-44f6-82c8-f82bf1da6ef5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.817682] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Releasing lock 
"refresh_cache-91ce0ab3-4fa4-4992-995a-0baeec91d9d0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.818088] env[62619]: DEBUG nova.compute.manager [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Instance network_info: |[{"id": "19b98734-638c-48c1-aa4b-58c310858ab7", "address": "fa:16:3e:ad:1a:90", "network": {"id": "788c512c-28df-4f6e-9573-3ba4f90ef1bd", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1238022785-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b9c1aa2b9334d409120b93382a7e770", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53ebf5df-5ecb-4a0c-a163-d88165639de0", "external-id": "nsx-vlan-transportzone-588", "segmentation_id": 588, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19b98734-63", "ovs_interfaceid": "19b98734-638c-48c1-aa4b-58c310858ab7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1425.818576] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:1a:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53ebf5df-5ecb-4a0c-a163-d88165639de0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19b98734-638c-48c1-aa4b-58c310858ab7', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1425.829827] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Creating folder: Project (2b9c1aa2b9334d409120b93382a7e770). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1425.830742] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01a6febc-505c-4fb3-a9ef-6a984c925a85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.848219] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Created folder: Project (2b9c1aa2b9334d409120b93382a7e770) in parent group-v368875. [ 1425.848449] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Creating folder: Instances. Parent ref: group-v368942. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1425.849426] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf6c09da-9128-408e-8226-b1e61edf346c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.864832] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Created folder: Instances in parent group-v368942. [ 1425.865131] env[62619]: DEBUG oslo.service.loopingcall [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1425.867930] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1425.869096] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e785973-6a9c-4e1e-b90f-0ffa6aa42b4e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.896591] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1425.896591] env[62619]: value = "task-1777297" [ 1425.896591] env[62619]: _type = "Task" [ 1425.896591] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.907893] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777297, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.163962] env[62619]: DEBUG nova.compute.manager [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Received event network-vif-plugged-19b98734-638c-48c1-aa4b-58c310858ab7 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1426.165051] env[62619]: DEBUG oslo_concurrency.lockutils [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] Acquiring lock "91ce0ab3-4fa4-4992-995a-0baeec91d9d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.165254] env[62619]: DEBUG oslo_concurrency.lockutils [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] Lock "91ce0ab3-4fa4-4992-995a-0baeec91d9d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.165428] env[62619]: DEBUG oslo_concurrency.lockutils [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] Lock "91ce0ab3-4fa4-4992-995a-0baeec91d9d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.165596] env[62619]: DEBUG nova.compute.manager [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] No waiting events found dispatching network-vif-plugged-19b98734-638c-48c1-aa4b-58c310858ab7 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1426.165756] env[62619]: WARNING nova.compute.manager [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Received unexpected event network-vif-plugged-19b98734-638c-48c1-aa4b-58c310858ab7 for instance with vm_state building and task_state spawning. [ 1426.165910] env[62619]: DEBUG nova.compute.manager [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Received event network-changed-3e989184-0116-4b59-b0cb-45f895f69e47 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1426.166076] env[62619]: DEBUG nova.compute.manager [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Refreshing instance network info cache due to event network-changed-3e989184-0116-4b59-b0cb-45f895f69e47. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1426.166258] env[62619]: DEBUG oslo_concurrency.lockutils [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] Acquiring lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.166387] env[62619]: DEBUG oslo_concurrency.lockutils [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] Acquired lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.166536] env[62619]: DEBUG nova.network.neutron [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Refreshing network info cache for port 3e989184-0116-4b59-b0cb-45f895f69e47 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1426.182554] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777294, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.586731} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.182869] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4374c102-a6fe-45ef-ad49-a1295f96899a/4374c102-a6fe-45ef-ad49-a1295f96899a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1426.183098] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1426.183356] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e80f00a8-bf69-48f0-bbd2-4e64a121e31e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.192702] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1426.193254] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a61e18a4-e00e-4642-85f4-de4e1ddf62bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.196378] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Waiting for the task: (returnval){ [ 1426.196378] env[62619]: value = 
"task-1777298" [ 1426.196378] env[62619]: _type = "Task" [ 1426.196378] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.202375] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1426.202375] env[62619]: value = "task-1777299" [ 1426.202375] env[62619]: _type = "Task" [ 1426.202375] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.208243] env[62619]: DEBUG oslo_concurrency.lockutils [req-526f2dbc-0d56-415e-b39c-b57fa3f1bfec req-773940b6-cbf9-4de7-8bba-5bed7637c803 service nova] Releasing lock "refresh_cache-4374c102-a6fe-45ef-ad49-a1295f96899a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.208702] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777298, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.219132] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777299, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.234527] env[62619]: DEBUG nova.compute.manager [req-27728b43-b4e5-4871-83ee-3be97b15a49e req-4bbc2174-246d-41c0-8e9a-619cd3ff7db5 service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Received event network-changed-28e9da04-af12-4a21-b4ee-408c492669ef {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1426.237124] env[62619]: DEBUG nova.compute.manager [req-27728b43-b4e5-4871-83ee-3be97b15a49e req-4bbc2174-246d-41c0-8e9a-619cd3ff7db5 service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Refreshing instance network info cache due to event network-changed-28e9da04-af12-4a21-b4ee-408c492669ef. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1426.237124] env[62619]: DEBUG oslo_concurrency.lockutils [req-27728b43-b4e5-4871-83ee-3be97b15a49e req-4bbc2174-246d-41c0-8e9a-619cd3ff7db5 service nova] Acquiring lock "refresh_cache-006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.237124] env[62619]: DEBUG oslo_concurrency.lockutils [req-27728b43-b4e5-4871-83ee-3be97b15a49e req-4bbc2174-246d-41c0-8e9a-619cd3ff7db5 service nova] Acquired lock "refresh_cache-006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.237124] env[62619]: DEBUG nova.network.neutron [req-27728b43-b4e5-4871-83ee-3be97b15a49e req-4bbc2174-246d-41c0-8e9a-619cd3ff7db5 service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Refreshing network info cache for port 28e9da04-af12-4a21-b4ee-408c492669ef {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1426.274912] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d897048-1dab-40a8-bd50-f65ac4b52b3a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.282296] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7807aa2-848e-4924-9dca-68726a0e8d5f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.319813] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484aa2b3-3f25-42cd-9ef3-a1676b2fa368 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.331733] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275c6a1a-0850-497a-80d1-846d4e146418 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.353217] env[62619]: DEBUG nova.compute.provider_tree [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1426.407821] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777297, 'name': CreateVM_Task, 'duration_secs': 0.463747} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.407943] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1426.408889] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.409224] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.409582] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1426.409871] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6b9794b-eb66-4e4f-b6ac-aa9b83f9949a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.416106] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Waiting for the task: (returnval){ [ 1426.416106] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52eaef9f-cdf6-aedb-e37d-08bb35b614e9" [ 1426.416106] env[62619]: _type = "Task" [ 1426.416106] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.424889] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52eaef9f-cdf6-aedb-e37d-08bb35b614e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.581396] env[62619]: DEBUG nova.network.neutron [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Successfully created port: 5ce94831-5f9c-4556-936d-e71ee8d04f45 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1426.619377] env[62619]: DEBUG nova.compute.manager [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1426.646334] env[62619]: DEBUG nova.virt.hardware [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1426.646596] env[62619]: DEBUG nova.virt.hardware [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1426.646752] env[62619]: DEBUG nova.virt.hardware [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1426.646928] env[62619]: DEBUG nova.virt.hardware [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1426.647226] env[62619]: DEBUG nova.virt.hardware [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1426.647299] env[62619]: DEBUG nova.virt.hardware [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1426.647445] env[62619]: DEBUG nova.virt.hardware [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1426.647599] env[62619]: DEBUG nova.virt.hardware [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1426.647762] 
env[62619]: DEBUG nova.virt.hardware [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1426.647918] env[62619]: DEBUG nova.virt.hardware [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1426.648103] env[62619]: DEBUG nova.virt.hardware [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1426.648959] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d4c529-3e40-4df4-b581-43e0060337b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.657787] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8bf0ec-decd-4f61-bc88-d104a7ecaebf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.708563] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777298, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140659} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.711986] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1426.713064] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88fc2cd-0670-4b60-959f-3df65a8b0ecc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.738291] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 4374c102-a6fe-45ef-ad49-a1295f96899a/4374c102-a6fe-45ef-ad49-a1295f96899a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1426.738911] env[62619]: DEBUG oslo_vmware.api [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777299, 'name': PowerOnVM_Task, 'duration_secs': 0.442616} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.739136] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7ffa972-2260-478b-a6d9-7f0d217879b0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.753444] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1426.753637] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0cec18-60b6-43af-914c-80e0de45b650 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating instance 'aa4906f1-e801-4df0-819e-8c5fb5930fb5' progress to 100 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1426.766420] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Waiting for the task: (returnval){ [ 1426.766420] env[62619]: value = "task-1777300" [ 1426.766420] env[62619]: _type = "Task" [ 1426.766420] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.776762] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777300, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.857828] env[62619]: DEBUG nova.scheduler.client.report [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1426.930058] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52eaef9f-cdf6-aedb-e37d-08bb35b614e9, 'name': SearchDatastore_Task, 'duration_secs': 0.047861} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.930637] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.931061] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1426.931537] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.931848] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.932256] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1426.932684] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-210e4fa7-d359-4cd4-9d1b-2a881f207435 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.945495] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1426.945724] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1426.946516] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ed045c7-5f7f-4eec-9283-9ffc1fb6fa6e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.952595] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Waiting for the task: (returnval){ [ 1426.952595] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a08a1c-63d0-82ee-84a7-5cb74ed4b5cf" [ 1426.952595] env[62619]: _type = "Task" [ 1426.952595] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.962039] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a08a1c-63d0-82ee-84a7-5cb74ed4b5cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.016458] env[62619]: DEBUG nova.network.neutron [req-27728b43-b4e5-4871-83ee-3be97b15a49e req-4bbc2174-246d-41c0-8e9a-619cd3ff7db5 service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Updated VIF entry in instance network info cache for port 28e9da04-af12-4a21-b4ee-408c492669ef. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1427.016845] env[62619]: DEBUG nova.network.neutron [req-27728b43-b4e5-4871-83ee-3be97b15a49e req-4bbc2174-246d-41c0-8e9a-619cd3ff7db5 service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Updating instance_info_cache with network_info: [{"id": "28e9da04-af12-4a21-b4ee-408c492669ef", "address": "fa:16:3e:00:1b:cf", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28e9da04-af", "ovs_interfaceid": "28e9da04-af12-4a21-b4ee-408c492669ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.186974] env[62619]: INFO nova.compute.manager [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: 
e81c03f7-9c0e-46bd-9641-aced82038eca] Rebuilding instance [ 1427.238133] env[62619]: DEBUG nova.compute.manager [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1427.238133] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db81daa2-02d9-44eb-bc44-7b2549755ce3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.276377] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777300, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.363884] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.774s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.364438] env[62619]: DEBUG nova.compute.manager [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1427.367664] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.830s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1427.367891] env[62619]: DEBUG nova.objects.instance [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Lazy-loading 'resources' on Instance uuid 1847c5d8-16eb-4feb-8a09-24ad6728e59c {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1427.469239] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a08a1c-63d0-82ee-84a7-5cb74ed4b5cf, 'name': SearchDatastore_Task, 'duration_secs': 0.022857} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.470278] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab58c7aa-8264-4ec5-938f-50ab72613faa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.477258] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Waiting for the task: (returnval){ [ 1427.477258] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f59c07-5ce1-e73e-d620-2a1842c1c7a5" [ 1427.477258] env[62619]: _type = "Task" [ 1427.477258] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.495316] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f59c07-5ce1-e73e-d620-2a1842c1c7a5, 'name': SearchDatastore_Task} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.495586] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1427.495859] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 91ce0ab3-4fa4-4992-995a-0baeec91d9d0/91ce0ab3-4fa4-4992-995a-0baeec91d9d0.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1427.496167] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c727d409-ead4-4820-9e22-fa4c7bc57180 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.504532] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Waiting for the task: (returnval){ [ 1427.504532] env[62619]: value = "task-1777301" [ 1427.504532] env[62619]: _type = "Task" [ 1427.504532] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.515812] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777301, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.519615] env[62619]: DEBUG oslo_concurrency.lockutils [req-27728b43-b4e5-4871-83ee-3be97b15a49e req-4bbc2174-246d-41c0-8e9a-619cd3ff7db5 service nova] Releasing lock "refresh_cache-006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1427.735148] env[62619]: DEBUG nova.network.neutron [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Updated VIF entry in instance network info cache for port 3e989184-0116-4b59-b0cb-45f895f69e47. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1427.736103] env[62619]: DEBUG nova.network.neutron [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Updating instance_info_cache with network_info: [{"id": "3e989184-0116-4b59-b0cb-45f895f69e47", "address": "fa:16:3e:69:5f:b0", "network": {"id": "458b9515-f6cd-49e3-b6f2-594543a98f0d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1747794949-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "22106ed527594810885b6891b382c3ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e989184-01", "ovs_interfaceid": "3e989184-0116-4b59-b0cb-45f895f69e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.781962] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777300, 'name': ReconfigVM_Task, 'duration_secs': 0.83907} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.786133] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 4374c102-a6fe-45ef-ad49-a1295f96899a/4374c102-a6fe-45ef-ad49-a1295f96899a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1427.786133] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9a1a861-f991-4562-a81b-5b384cef9dff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.792742] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Waiting for the task: (returnval){ [ 1427.792742] env[62619]: value = "task-1777302" [ 1427.792742] env[62619]: _type = "Task" [ 1427.792742] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.809562] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777302, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.872476] env[62619]: DEBUG nova.compute.utils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1427.879053] env[62619]: DEBUG nova.compute.manager [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1427.879302] env[62619]: DEBUG nova.network.neutron [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1427.891148] env[62619]: DEBUG nova.network.neutron [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Successfully updated port: a7a7cbea-2a80-4996-a6d0-8a345c83069f {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1428.021297] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777301, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.060116] env[62619]: DEBUG nova.policy [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e6ea39cb8e7487cacc8e399961e97c4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '579c1e7229664b1d979a87a476d1de65', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1428.102761] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Acquiring lock "3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.103172] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Lock "3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.103414] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Acquiring lock "3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.103636] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Lock "3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.103872] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Lock "3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.106681] env[62619]: INFO nova.compute.manager [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] 
Terminating instance [ 1428.238674] env[62619]: DEBUG oslo_concurrency.lockutils [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] Releasing lock "refresh_cache-d16bebd1-a144-4d73-8eb6-8ab12a08fe69" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1428.239026] env[62619]: DEBUG nova.compute.manager [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Received event network-changed-19b98734-638c-48c1-aa4b-58c310858ab7 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1428.239183] env[62619]: DEBUG nova.compute.manager [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Refreshing instance network info cache due to event network-changed-19b98734-638c-48c1-aa4b-58c310858ab7. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1428.239665] env[62619]: DEBUG oslo_concurrency.lockutils [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] Acquiring lock "refresh_cache-91ce0ab3-4fa4-4992-995a-0baeec91d9d0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1428.240568] env[62619]: DEBUG oslo_concurrency.lockutils [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] Acquired lock "refresh_cache-91ce0ab3-4fa4-4992-995a-0baeec91d9d0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.240568] env[62619]: DEBUG nova.network.neutron [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Refreshing network info cache for port 19b98734-638c-48c1-aa4b-58c310858ab7 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1428.254318] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1428.254670] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25d2051c-8e11-4211-9dd1-40295862c98d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.263860] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Waiting for the task: (returnval){ [ 1428.263860] env[62619]: value = "task-1777303" [ 1428.263860] env[62619]: _type = "Task" [ 1428.263860] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.278672] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777303, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.315724] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777302, 'name': Rename_Task, 'duration_secs': 0.495321} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.316054] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1428.316578] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-808a4bf3-ca8d-421f-b89e-8f82748e0422 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.328023] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Waiting for the task: (returnval){ [ 1428.328023] env[62619]: value = "task-1777304" [ 1428.328023] env[62619]: _type = "Task" [ 1428.328023] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.341954] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777304, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.381829] env[62619]: DEBUG nova.compute.manager [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1428.397869] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "refresh_cache-b6aae13f-0711-4421-9d55-de7ece3e4b89" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1428.397869] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "refresh_cache-b6aae13f-0711-4421-9d55-de7ece3e4b89" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.397869] env[62619]: DEBUG nova.network.neutron [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1428.521600] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777301, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.718205} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.522674] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 91ce0ab3-4fa4-4992-995a-0baeec91d9d0/91ce0ab3-4fa4-4992-995a-0baeec91d9d0.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1428.522995] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1428.523284] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e0c75f49-6f3d-4650-9455-5cd192856ee5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.534499] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Waiting for the task: (returnval){ [ 1428.534499] env[62619]: value = "task-1777305" [ 1428.534499] env[62619]: _type = "Task" [ 1428.534499] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.540920] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6cfd5d-4c6c-4d52-ac5e-787b47d8c6cd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.547169] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777305, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.553275] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d3f962-335d-4365-ade0-491a8869e6f7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.591551] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72846b87-ffa5-46cb-97e9-0b673e2db797 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.600739] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19aceda-7a3d-4742-aa4a-909a2a2709d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.622568] env[62619]: DEBUG nova.compute.manager [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1428.622793] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1428.623662] env[62619]: DEBUG nova.compute.provider_tree [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1428.626578] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30cf8484-8b93-48d3-b2b2-bf487f783453 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.637022] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1428.637022] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be4b5993-375b-45c1-affc-c4f69a50f1f8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.645715] env[62619]: DEBUG oslo_vmware.api [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Waiting for the task: (returnval){ [ 1428.645715] env[62619]: value = "task-1777306" [ 1428.645715] env[62619]: _type = "Task" [ 1428.645715] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.654368] env[62619]: DEBUG oslo_vmware.api [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777306, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.774918] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777303, 'name': PowerOffVM_Task, 'duration_secs': 0.33387} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.775322] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1428.775433] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1428.777068] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101e28da-dd19-4465-b13c-90d44a5243dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.785743] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1428.786085] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1b60da9-0ab9-4bcf-8ccd-05ac6c990335 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.813903] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1428.814200] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1428.814407] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Deleting the datastore file [datastore1] e81c03f7-9c0e-46bd-9641-aced82038eca {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1428.814701] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f99237bd-fed0-4c0c-9cb8-fec6382360b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.826893] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Waiting for the task: (returnval){ [ 1428.826893] env[62619]: value = "task-1777308" [ 1428.826893] env[62619]: _type = "Task" [ 1428.826893] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.840754] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777308, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.841010] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777304, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.969765] env[62619]: DEBUG nova.network.neutron [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1429.009323] env[62619]: DEBUG nova.network.neutron [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Successfully created port: a0692e57-a205-4f22-b960-7d48b202c513 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1429.048864] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777305, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128244} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.050392] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1429.050392] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3ab35c-f067-493e-97ff-38848b682792 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.079466] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 91ce0ab3-4fa4-4992-995a-0baeec91d9d0/91ce0ab3-4fa4-4992-995a-0baeec91d9d0.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1429.080000] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7719a68e-d2f6-496f-bb44-760ba7540de3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.114884] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Waiting for the task: (returnval){ [ 1429.114884] env[62619]: value = "task-1777309" [ 1429.114884] env[62619]: _type = "Task" [ 1429.114884] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.128334] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777309, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.131652] env[62619]: DEBUG nova.scheduler.client.report [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1429.138435] env[62619]: DEBUG nova.compute.manager [req-b60fccc1-09c5-4b4b-85d3-2ed75c6c33c7 req-0d948fcc-9a37-4f82-b81f-8281071f35e7 service nova] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Received event network-vif-plugged-a7a7cbea-2a80-4996-a6d0-8a345c83069f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1429.138783] env[62619]: DEBUG oslo_concurrency.lockutils [req-b60fccc1-09c5-4b4b-85d3-2ed75c6c33c7 req-0d948fcc-9a37-4f82-b81f-8281071f35e7 service nova] Acquiring lock "b6aae13f-0711-4421-9d55-de7ece3e4b89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.139115] env[62619]: DEBUG oslo_concurrency.lockutils [req-b60fccc1-09c5-4b4b-85d3-2ed75c6c33c7 req-0d948fcc-9a37-4f82-b81f-8281071f35e7 service nova] Lock "b6aae13f-0711-4421-9d55-de7ece3e4b89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.139407] env[62619]: DEBUG oslo_concurrency.lockutils [req-b60fccc1-09c5-4b4b-85d3-2ed75c6c33c7 req-0d948fcc-9a37-4f82-b81f-8281071f35e7 service nova] Lock "b6aae13f-0711-4421-9d55-de7ece3e4b89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.139680] env[62619]: DEBUG nova.compute.manager [req-b60fccc1-09c5-4b4b-85d3-2ed75c6c33c7 req-0d948fcc-9a37-4f82-b81f-8281071f35e7 service nova] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] No waiting events found dispatching network-vif-plugged-a7a7cbea-2a80-4996-a6d0-8a345c83069f {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1429.140135] env[62619]: WARNING nova.compute.manager [req-b60fccc1-09c5-4b4b-85d3-2ed75c6c33c7 req-0d948fcc-9a37-4f82-b81f-8281071f35e7 service nova] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Received unexpected event network-vif-plugged-a7a7cbea-2a80-4996-a6d0-8a345c83069f for instance with vm_state building and task_state spawning. 
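[editor's note] The "No waiting events found dispatching network-vif-plugged-..." and the WARNING "Received unexpected event ..." lines above come from the compute manager's prepare-and-wait pattern for external Neutron events. As a rough orientation only, the sketch below shows that general pattern with hypothetical names; it is not Nova's actual InstanceEvents implementation.

    # Illustrative sketch, assuming a simple registry of waiters keyed by
    # (instance_id, event_name). All names here are hypothetical.
    import threading

    class InstanceEventWaiters:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_id, event_name) -> threading.Event

        def prepare(self, instance_id, event_name):
            # Called by the code path that expects the event (e.g. before plugging a VIF).
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_id, event_name)] = ev
            return ev

        def pop(self, instance_id, event_name):
            # Called when the external notification arrives (e.g. network-vif-plugged).
            with self._lock:
                ev = self._waiters.pop((instance_id, event_name), None)
            if ev is None:
                # Corresponds to the WARNING in the trace: nobody was waiting for this event.
                print("unexpected event %s for instance %s" % (event_name, instance_id))
                return False
            ev.set()
            return True

    # Usage: a spawning thread calls prepare() before triggering the operation and
    # then wait()s on the returned Event; the event-handler thread calls pop().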
[ 1429.140483] env[62619]: DEBUG nova.compute.manager [req-b60fccc1-09c5-4b4b-85d3-2ed75c6c33c7 req-0d948fcc-9a37-4f82-b81f-8281071f35e7 service nova] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Received event network-changed-a7a7cbea-2a80-4996-a6d0-8a345c83069f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1429.140781] env[62619]: DEBUG nova.compute.manager [req-b60fccc1-09c5-4b4b-85d3-2ed75c6c33c7 req-0d948fcc-9a37-4f82-b81f-8281071f35e7 service nova] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Refreshing instance network info cache due to event network-changed-a7a7cbea-2a80-4996-a6d0-8a345c83069f. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1429.142410] env[62619]: DEBUG oslo_concurrency.lockutils [req-b60fccc1-09c5-4b4b-85d3-2ed75c6c33c7 req-0d948fcc-9a37-4f82-b81f-8281071f35e7 service nova] Acquiring lock "refresh_cache-b6aae13f-0711-4421-9d55-de7ece3e4b89" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.165151] env[62619]: DEBUG oslo_vmware.api [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777306, 'name': PowerOffVM_Task, 'duration_secs': 0.33491} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.165703] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1429.165999] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1429.166345] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3501757-0139-4790-8518-e728b0c7ab95 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.202918] env[62619]: DEBUG nova.network.neutron [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Updating instance_info_cache with network_info: [{"id": "a7a7cbea-2a80-4996-a6d0-8a345c83069f", "address": "fa:16:3e:87:7c:0f", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7a7cbea-2a", "ovs_interfaceid": "a7a7cbea-2a80-4996-a6d0-8a345c83069f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.257051] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1429.257558] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1429.257644] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Deleting the datastore file [datastore1] 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1429.257984] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0758e829-46ef-47ad-b776-e3073953d5fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.268490] env[62619]: DEBUG oslo_vmware.api [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Waiting for the task: (returnval){ [ 1429.268490] env[62619]: value = "task-1777311" [ 1429.268490] env[62619]: _type = "Task" [ 1429.268490] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.279810] env[62619]: DEBUG oslo_vmware.api [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777311, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.340381] env[62619]: DEBUG oslo_vmware.api [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777304, 'name': PowerOnVM_Task, 'duration_secs': 0.864033} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.346712] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1429.346712] env[62619]: INFO nova.compute.manager [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Took 9.82 seconds to spawn the instance on the hypervisor. [ 1429.346712] env[62619]: DEBUG nova.compute.manager [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1429.346712] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777308, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176415} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.347172] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96cc6f14-c311-4594-8f1b-79109f30efe0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.350887] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1429.351980] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1429.353022] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1429.394029] env[62619]: DEBUG nova.compute.manager [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1429.426985] env[62619]: DEBUG nova.virt.hardware [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1429.427277] env[62619]: DEBUG nova.virt.hardware [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1429.427443] env[62619]: DEBUG nova.virt.hardware [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1429.427622] env[62619]: DEBUG nova.virt.hardware [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1429.427763] env[62619]: DEBUG nova.virt.hardware [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1429.427923] env[62619]: DEBUG nova.virt.hardware [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1429.428202] env[62619]: DEBUG nova.virt.hardware [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1429.432023] env[62619]: DEBUG nova.virt.hardware [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1429.432023] 
env[62619]: DEBUG nova.virt.hardware [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1429.432023] env[62619]: DEBUG nova.virt.hardware [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1429.432023] env[62619]: DEBUG nova.virt.hardware [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1429.432023] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00eb2d7a-229f-4e15-b6c6-a437c4fb653b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.434605] env[62619]: DEBUG nova.network.neutron [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Updated VIF entry in instance network info cache for port 19b98734-638c-48c1-aa4b-58c310858ab7. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1429.434925] env[62619]: DEBUG nova.network.neutron [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Updating instance_info_cache with network_info: [{"id": "19b98734-638c-48c1-aa4b-58c310858ab7", "address": "fa:16:3e:ad:1a:90", "network": {"id": "788c512c-28df-4f6e-9573-3ba4f90ef1bd", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1238022785-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b9c1aa2b9334d409120b93382a7e770", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53ebf5df-5ecb-4a0c-a163-d88165639de0", "external-id": "nsx-vlan-transportzone-588", "segmentation_id": 588, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19b98734-63", "ovs_interfaceid": "19b98734-638c-48c1-aa4b-58c310858ab7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.439463] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd75c2d7-00fc-4737-96eb-9e8b2767ab85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.634370] env[62619]: DEBUG oslo_vmware.api [None 
req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777309, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.638519] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.270s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.646592] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.183s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.648386] env[62619]: INFO nova.compute.claims [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1429.683261] env[62619]: INFO nova.scheduler.client.report [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Deleted allocations for instance 1847c5d8-16eb-4feb-8a09-24ad6728e59c [ 1429.706385] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "refresh_cache-b6aae13f-0711-4421-9d55-de7ece3e4b89" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.707024] env[62619]: DEBUG nova.compute.manager [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Instance network_info: |[{"id": "a7a7cbea-2a80-4996-a6d0-8a345c83069f", "address": "fa:16:3e:87:7c:0f", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7a7cbea-2a", "ovs_interfaceid": "a7a7cbea-2a80-4996-a6d0-8a345c83069f", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1429.707024] env[62619]: DEBUG oslo_concurrency.lockutils [req-b60fccc1-09c5-4b4b-85d3-2ed75c6c33c7 req-0d948fcc-9a37-4f82-b81f-8281071f35e7 service nova] Acquired lock "refresh_cache-b6aae13f-0711-4421-9d55-de7ece3e4b89" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.707256] env[62619]: DEBUG nova.network.neutron [req-b60fccc1-09c5-4b4b-85d3-2ed75c6c33c7 req-0d948fcc-9a37-4f82-b81f-8281071f35e7 service nova] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Refreshing network info cache for port a7a7cbea-2a80-4996-a6d0-8a345c83069f {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1429.708352] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:7c:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a7a7cbea-2a80-4996-a6d0-8a345c83069f', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1429.719534] env[62619]: DEBUG oslo.service.loopingcall [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1429.723964] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1429.724536] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae687154-b23d-48d7-a540-77164a6db0f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.753356] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1429.753356] env[62619]: value = "task-1777312" [ 1429.753356] env[62619]: _type = "Task" [ 1429.753356] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.766641] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777312, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.779361] env[62619]: DEBUG oslo_vmware.api [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Task: {'id': task-1777311, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.438015} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.779760] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1429.779760] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1429.779937] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1429.780145] env[62619]: INFO nova.compute.manager [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1429.780345] env[62619]: DEBUG oslo.service.loopingcall [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1429.780777] env[62619]: DEBUG nova.compute.manager [-] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1429.780878] env[62619]: DEBUG nova.network.neutron [-] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1429.874617] env[62619]: INFO nova.compute.manager [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Took 43.83 seconds to build instance. 
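[editor's note] The recurring "Waiting for the task: (returnval){ value = "task-..." } to complete", the "progress is N%" lines, and the final "completed successfully" entries are produced by polling vCenter task handles until they finish. The following is a minimal, self-contained sketch of such a poll loop under assumed names (FakeTask, poll_interval); it is not the oslo.vmware API.

    # Illustrative poll-until-complete loop; FakeTask stands in for a vCenter task handle.
    import time

    class FakeTask:
        """Hypothetical task handle; progresses 0 -> 100 over a few polls."""
        def __init__(self):
            self.progress = 0

        def refresh(self):
            self.progress = min(100, self.progress + 33)
            return "success" if self.progress >= 100 else "running"

    def wait_for_task(task, poll_interval=0.5):
        while True:
            state = task.refresh()
            print("progress is %d%%" % task.progress)
            if state == "success":
                print("completed successfully")
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(poll_interval)

    wait_for_task(FakeTask(), poll_interval=0.01)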
[ 1429.944679] env[62619]: DEBUG oslo_concurrency.lockutils [req-29e5b3cc-76b5-4425-92d2-93bf3634a50d req-3a15c41c-aad4-4c15-b8aa-635254ee71cf service nova] Releasing lock "refresh_cache-91ce0ab3-4fa4-4992-995a-0baeec91d9d0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.994563] env[62619]: DEBUG nova.network.neutron [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Successfully updated port: 5ce94831-5f9c-4556-936d-e71ee8d04f45 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1430.127304] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777309, 'name': ReconfigVM_Task, 'duration_secs': 0.591445} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.127576] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 91ce0ab3-4fa4-4992-995a-0baeec91d9d0/91ce0ab3-4fa4-4992-995a-0baeec91d9d0.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1430.128227] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd5e7ae7-859a-4aeb-ad07-a35c62a63f28 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.136425] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Waiting for the task: (returnval){ [ 1430.136425] env[62619]: value = "task-1777313" [ 1430.136425] env[62619]: _type = "Task" [ 1430.136425] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.146063] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777313, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.193400] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e390f4c-f3e8-4e2f-bd46-014ccb3ad0a9 tempest-ServerDiagnosticsV248Test-76292928 tempest-ServerDiagnosticsV248Test-76292928-project-member] Lock "1847c5d8-16eb-4feb-8a09-24ad6728e59c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.145s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.235056] env[62619]: DEBUG nova.network.neutron [req-b60fccc1-09c5-4b4b-85d3-2ed75c6c33c7 req-0d948fcc-9a37-4f82-b81f-8281071f35e7 service nova] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Updated VIF entry in instance network info cache for port a7a7cbea-2a80-4996-a6d0-8a345c83069f. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1430.235469] env[62619]: DEBUG nova.network.neutron [req-b60fccc1-09c5-4b4b-85d3-2ed75c6c33c7 req-0d948fcc-9a37-4f82-b81f-8281071f35e7 service nova] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Updating instance_info_cache with network_info: [{"id": "a7a7cbea-2a80-4996-a6d0-8a345c83069f", "address": "fa:16:3e:87:7c:0f", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7a7cbea-2a", "ovs_interfaceid": "a7a7cbea-2a80-4996-a6d0-8a345c83069f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1430.247012] env[62619]: DEBUG oslo_concurrency.lockutils [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquiring lock "d16bebd1-a144-4d73-8eb6-8ab12a08fe69" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.247410] env[62619]: DEBUG oslo_concurrency.lockutils [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Lock "d16bebd1-a144-4d73-8eb6-8ab12a08fe69" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.247549] env[62619]: DEBUG oslo_concurrency.lockutils [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquiring lock "d16bebd1-a144-4d73-8eb6-8ab12a08fe69-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.247746] env[62619]: DEBUG oslo_concurrency.lockutils [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Lock "d16bebd1-a144-4d73-8eb6-8ab12a08fe69-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.247987] env[62619]: DEBUG 
oslo_concurrency.lockutils [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Lock "d16bebd1-a144-4d73-8eb6-8ab12a08fe69-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.250033] env[62619]: INFO nova.compute.manager [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Terminating instance [ 1430.265259] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777312, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.376527] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd1e917-de87-4073-b917-c7b505cefe7b tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Lock "4374c102-a6fe-45ef-ad49-a1295f96899a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.957s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.406588] env[62619]: DEBUG nova.virt.hardware [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1430.406832] env[62619]: DEBUG nova.virt.hardware [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1430.406983] env[62619]: DEBUG nova.virt.hardware [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1430.413405] env[62619]: DEBUG nova.virt.hardware [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1430.413405] env[62619]: DEBUG nova.virt.hardware [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 
tempest-ServersAdmin275Test-1073091372-project-admin] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1430.413405] env[62619]: DEBUG nova.virt.hardware [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1430.413405] env[62619]: DEBUG nova.virt.hardware [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1430.413405] env[62619]: DEBUG nova.virt.hardware [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1430.413663] env[62619]: DEBUG nova.virt.hardware [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1430.413663] env[62619]: DEBUG nova.virt.hardware [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1430.413663] env[62619]: DEBUG nova.virt.hardware [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1430.414427] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9035ffff-86d5-47c3-a7a5-dbf3db78a307 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.426980] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce672247-0be4-4c83-8e94-c7b522be07bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.445941] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1430.455021] env[62619]: DEBUG oslo.service.loopingcall [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1430.455021] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1430.455021] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e8c298c-f6e6-4259-8103-b86b569df5d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.472420] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1430.472420] env[62619]: value = "task-1777314" [ 1430.472420] env[62619]: _type = "Task" [ 1430.472420] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.481851] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777314, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.496813] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "refresh_cache-6dbe4133-a6ba-4bba-9eb9-47a3d2691eec" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.496962] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquired lock "refresh_cache-6dbe4133-a6ba-4bba-9eb9-47a3d2691eec" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.497192] env[62619]: DEBUG nova.network.neutron [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1430.572663] env[62619]: DEBUG nova.compute.manager [req-6cfd4539-e497-4f62-9f70-210993846448 req-c089be2e-8645-4618-a3b7-f95a48be6fed service nova] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Received event network-vif-deleted-902ccd9e-7453-4859-8766-73a3449e946d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1430.573241] env[62619]: INFO nova.compute.manager [req-6cfd4539-e497-4f62-9f70-210993846448 req-c089be2e-8645-4618-a3b7-f95a48be6fed service nova] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Neutron deleted interface 902ccd9e-7453-4859-8766-73a3449e946d; detaching it from the instance and deleting it from the info cache [ 1430.573241] env[62619]: DEBUG nova.network.neutron [req-6cfd4539-e497-4f62-9f70-210993846448 req-c089be2e-8645-4618-a3b7-f95a48be6fed service nova] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1430.590199] env[62619]: DEBUG nova.network.neutron [-] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} 
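[editor's note] Many entries above are lock bookkeeping: named locks such as "compute_resources" or "refresh_cache-<uuid>" are acquired with a "waited N s" figure and released with a "held N s" figure. As a simplified stand-in (not oslo.concurrency's lockutils), the sketch below reproduces that bookkeeping with a registry of named threading locks.

    # Illustrative named-lock helper with waited/held timing; names are hypothetical.
    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_lock = threading.Lock()

    @contextmanager
    def named_lock(name):
        with _registry_lock:
            lock = _locks.setdefault(name, threading.Lock())
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
        try:
            yield
        finally:
            held = time.monotonic() - start - waited
            lock.release()
            print('Lock "%s" released :: held %.3fs' % (name, held))

    # Usage:
    with named_lock("refresh_cache-b6aae13f-0711-4421-9d55-de7ece3e4b89"):
        pass  # refresh the instance network info cache here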
[ 1430.649817] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777313, 'name': Rename_Task, 'duration_secs': 0.382935} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.650169] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1430.650474] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-222a0da0-af32-4432-92eb-52faaef0c07b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.670065] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Waiting for the task: (returnval){ [ 1430.670065] env[62619]: value = "task-1777315" [ 1430.670065] env[62619]: _type = "Task" [ 1430.670065] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.685074] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777315, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.738488] env[62619]: DEBUG oslo_concurrency.lockutils [req-b60fccc1-09c5-4b4b-85d3-2ed75c6c33c7 req-0d948fcc-9a37-4f82-b81f-8281071f35e7 service nova] Releasing lock "refresh_cache-b6aae13f-0711-4421-9d55-de7ece3e4b89" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1430.755774] env[62619]: DEBUG nova.compute.manager [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1430.755997] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1430.757277] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38de2db-5af0-4178-a1db-ceb009e881ce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.776518] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777312, 'name': CreateVM_Task, 'duration_secs': 0.806162} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.777452] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1430.777671] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.777948] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.778208] env[62619]: DEBUG nova.compute.manager [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Going to confirm migration 1 {{(pid=62619) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5157}} [ 1430.780167] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1430.780411] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4582d7a5-339a-437a-9c38-f7741cee855f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.783473] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.783473] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.783473] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1430.783655] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-fb8c86a0-32fd-4d6a-aafc-ebbd6584fdb5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.789348] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1430.789348] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52358645-c9c5-94c8-378d-4bc20d233583" [ 1430.789348] env[62619]: _type = "Task" [ 1430.789348] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.796520] env[62619]: DEBUG oslo_vmware.api [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1430.796520] env[62619]: value = "task-1777316" [ 1430.796520] env[62619]: _type = "Task" [ 1430.796520] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.803666] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52358645-c9c5-94c8-378d-4bc20d233583, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.809825] env[62619]: DEBUG oslo_vmware.api [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777316, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.880227] env[62619]: DEBUG nova.compute.manager [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1430.989218] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777314, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.049613] env[62619]: DEBUG nova.network.neutron [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1431.076112] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c837195e-6a68-4164-9536-3866123a447f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.089104] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db4a6e7-fb49-4a74-9575-5bade4b6e18b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.110046] env[62619]: INFO nova.compute.manager [-] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Took 1.33 seconds to deallocate network for instance. [ 1431.144890] env[62619]: DEBUG nova.compute.manager [req-6cfd4539-e497-4f62-9f70-210993846448 req-c089be2e-8645-4618-a3b7-f95a48be6fed service nova] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Detach interface failed, port_id=902ccd9e-7453-4859-8766-73a3449e946d, reason: Instance 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1431.185838] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777315, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.265069] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fca9379-80b2-4e30-8352-a0d5cd77ae18 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.274137] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97eaeff4-316e-46e4-8e73-bc482d273b0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.322890] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00e89c3-4704-4ea1-afd1-368eba5b6e19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.331721] env[62619]: DEBUG oslo_vmware.api [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777316, 'name': PowerOffVM_Task, 'duration_secs': 0.391672} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.338401] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1431.338401] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1431.338401] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52358645-c9c5-94c8-378d-4bc20d233583, 'name': SearchDatastore_Task, 'duration_secs': 0.014493} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.338401] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea218197-0fb9-45c0-b69c-bc632298dbd2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.339178] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.339341] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1431.339600] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.339676] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.339831] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1431.341064] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f066791-ea20-4402-a910-affb05c41387 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.344782] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42af781c-4e15-4e61-9ca0-c9b7ae26953c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.359119] env[62619]: DEBUG nova.compute.provider_tree [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1431.361707] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1431.361903] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1431.365187] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23a69eb4-8429-44c3-b616-4d8ec93f8067 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.374903] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1431.374903] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a1fbd6-c6a9-8772-c468-ed0055bca954" [ 1431.374903] env[62619]: _type = "Task" [ 1431.374903] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.387819] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a1fbd6-c6a9-8772-c468-ed0055bca954, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.405128] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.419195] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "refresh_cache-aa4906f1-e801-4df0-819e-8c5fb5930fb5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.419462] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired lock "refresh_cache-aa4906f1-e801-4df0-819e-8c5fb5930fb5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.419557] env[62619]: DEBUG nova.network.neutron [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1431.419718] env[62619]: DEBUG nova.objects.instance [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lazy-loading 'info_cache' on Instance uuid aa4906f1-e801-4df0-819e-8c5fb5930fb5 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1431.440440] env[62619]: DEBUG nova.network.neutron [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Updating instance_info_cache with network_info: [{"id": "5ce94831-5f9c-4556-936d-e71ee8d04f45", "address": "fa:16:3e:b6:c9:d0", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ce94831-5f", "ovs_interfaceid": "5ce94831-5f9c-4556-936d-e71ee8d04f45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1431.484991] env[62619]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-1777314, 'name': CreateVM_Task, 'duration_secs': 0.542479} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.485220] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1431.485636] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.485809] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.486177] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1431.486463] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7020f5d3-e681-439b-b49b-cde2d9cf0a2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.492631] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Waiting for the task: (returnval){ [ 1431.492631] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dba05c-a6e4-0895-2fc1-4cba96e93b3c" [ 1431.492631] env[62619]: _type = "Task" [ 1431.492631] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.503505] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dba05c-a6e4-0895-2fc1-4cba96e93b3c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.616947] env[62619]: DEBUG oslo_concurrency.lockutils [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "7217d898-54ee-46ed-88fa-959c38e988e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.617191] env[62619]: DEBUG oslo_concurrency.lockutils [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "7217d898-54ee-46ed-88fa-959c38e988e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.617882] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.624724] env[62619]: DEBUG nova.compute.manager [req-fe4f1a85-c746-45e5-b5e4-1607087c5085 req-53f13a43-755a-4434-8555-02e263c28687 service nova] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Received event network-vif-plugged-5ce94831-5f9c-4556-936d-e71ee8d04f45 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1431.624824] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe4f1a85-c746-45e5-b5e4-1607087c5085 req-53f13a43-755a-4434-8555-02e263c28687 service nova] Acquiring lock "6dbe4133-a6ba-4bba-9eb9-47a3d2691eec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.625036] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe4f1a85-c746-45e5-b5e4-1607087c5085 req-53f13a43-755a-4434-8555-02e263c28687 service nova] Lock "6dbe4133-a6ba-4bba-9eb9-47a3d2691eec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.625206] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe4f1a85-c746-45e5-b5e4-1607087c5085 req-53f13a43-755a-4434-8555-02e263c28687 service nova] Lock "6dbe4133-a6ba-4bba-9eb9-47a3d2691eec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.625370] env[62619]: DEBUG nova.compute.manager [req-fe4f1a85-c746-45e5-b5e4-1607087c5085 req-53f13a43-755a-4434-8555-02e263c28687 service nova] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] No waiting events found dispatching network-vif-plugged-5ce94831-5f9c-4556-936d-e71ee8d04f45 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1431.625535] env[62619]: WARNING nova.compute.manager [req-fe4f1a85-c746-45e5-b5e4-1607087c5085 req-53f13a43-755a-4434-8555-02e263c28687 
service nova] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Received unexpected event network-vif-plugged-5ce94831-5f9c-4556-936d-e71ee8d04f45 for instance with vm_state building and task_state spawning. [ 1431.625690] env[62619]: DEBUG nova.compute.manager [req-fe4f1a85-c746-45e5-b5e4-1607087c5085 req-53f13a43-755a-4434-8555-02e263c28687 service nova] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Received event network-changed-5ce94831-5f9c-4556-936d-e71ee8d04f45 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1431.625842] env[62619]: DEBUG nova.compute.manager [req-fe4f1a85-c746-45e5-b5e4-1607087c5085 req-53f13a43-755a-4434-8555-02e263c28687 service nova] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Refreshing instance network info cache due to event network-changed-5ce94831-5f9c-4556-936d-e71ee8d04f45. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1431.626007] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe4f1a85-c746-45e5-b5e4-1607087c5085 req-53f13a43-755a-4434-8555-02e263c28687 service nova] Acquiring lock "refresh_cache-6dbe4133-a6ba-4bba-9eb9-47a3d2691eec" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.688440] env[62619]: DEBUG oslo_vmware.api [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777315, 'name': PowerOnVM_Task, 'duration_secs': 0.935745} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.690714] env[62619]: DEBUG nova.network.neutron [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Successfully updated port: a0692e57-a205-4f22-b960-7d48b202c513 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1431.693215] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1431.693595] env[62619]: INFO nova.compute.manager [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Took 9.45 seconds to spawn the instance on the hypervisor. 
[ 1431.694137] env[62619]: DEBUG nova.compute.manager [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1431.695723] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90bb265-1d86-498d-b4a9-704c59bc0ebc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.736913] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1431.737505] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1431.737876] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Deleting the datastore file [datastore1] d16bebd1-a144-4d73-8eb6-8ab12a08fe69 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1431.738547] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1fbbc50-9668-4897-8356-7eff0279ff02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.747637] env[62619]: DEBUG oslo_vmware.api [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for the task: (returnval){ [ 1431.747637] env[62619]: value = "task-1777318" [ 1431.747637] env[62619]: _type = "Task" [ 1431.747637] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.758653] env[62619]: DEBUG oslo_vmware.api [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777318, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.865061] env[62619]: DEBUG nova.scheduler.client.report [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1431.887068] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a1fbd6-c6a9-8772-c468-ed0055bca954, 'name': SearchDatastore_Task, 'duration_secs': 0.016949} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.888642] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9c94836-c45f-4ccc-b5ce-89c913b76d7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.895180] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1431.895180] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fb23eb-56ca-c023-3ac4-e1cc44c39f11" [ 1431.895180] env[62619]: _type = "Task" [ 1431.895180] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.904534] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fb23eb-56ca-c023-3ac4-e1cc44c39f11, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.943120] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Releasing lock "refresh_cache-6dbe4133-a6ba-4bba-9eb9-47a3d2691eec" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.943547] env[62619]: DEBUG nova.compute.manager [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Instance network_info: |[{"id": "5ce94831-5f9c-4556-936d-e71ee8d04f45", "address": "fa:16:3e:b6:c9:d0", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ce94831-5f", "ovs_interfaceid": "5ce94831-5f9c-4556-936d-e71ee8d04f45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1431.943746] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe4f1a85-c746-45e5-b5e4-1607087c5085 req-53f13a43-755a-4434-8555-02e263c28687 service nova] Acquired lock "refresh_cache-6dbe4133-a6ba-4bba-9eb9-47a3d2691eec" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.943922] env[62619]: DEBUG nova.network.neutron [req-fe4f1a85-c746-45e5-b5e4-1607087c5085 req-53f13a43-755a-4434-8555-02e263c28687 service nova] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Refreshing network info cache for port 5ce94831-5f9c-4556-936d-e71ee8d04f45 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1431.945343] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:c9:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5ce94831-5f9c-4556-936d-e71ee8d04f45', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1431.954410] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] 
Creating folder: Project (579c1e7229664b1d979a87a476d1de65). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1431.956060] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f41acbd-ef3e-484a-9f9b-fe2abdd00aab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.971165] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Created folder: Project (579c1e7229664b1d979a87a476d1de65) in parent group-v368875. [ 1431.971165] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Creating folder: Instances. Parent ref: group-v368947. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1431.971165] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef2ba9f5-8398-451e-bdec-4a149531b800 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.985194] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Created folder: Instances in parent group-v368947. [ 1431.985451] env[62619]: DEBUG oslo.service.loopingcall [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1431.985650] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1431.985856] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96e9f830-f34b-49b5-9ee4-914252d46b74 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.014593] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dba05c-a6e4-0895-2fc1-4cba96e93b3c, 'name': SearchDatastore_Task, 'duration_secs': 0.028541} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.016564] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.016564] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1432.016718] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1432.016853] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1432.016853] env[62619]: value = "task-1777321" [ 1432.016853] env[62619]: _type = "Task" [ 1432.016853] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.025524] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777321, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.196594] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "refresh_cache-80363e16-5dd2-42ad-9ead-25b121d62211" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1432.196905] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquired lock "refresh_cache-80363e16-5dd2-42ad-9ead-25b121d62211" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1432.197375] env[62619]: DEBUG nova.network.neutron [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1432.217888] env[62619]: INFO nova.compute.manager [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Took 42.32 seconds to build instance. 
[ 1432.259115] env[62619]: DEBUG oslo_vmware.api [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Task: {'id': task-1777318, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.475626} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.259115] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1432.259115] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1432.259293] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1432.259455] env[62619]: INFO nova.compute.manager [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Took 1.50 seconds to destroy the instance on the hypervisor. [ 1432.259691] env[62619]: DEBUG oslo.service.loopingcall [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1432.259867] env[62619]: DEBUG nova.compute.manager [-] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1432.259955] env[62619]: DEBUG nova.network.neutron [-] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1432.371245] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.725s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.374324] env[62619]: DEBUG nova.compute.manager [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1432.374971] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 31.734s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.375923] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.378138] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1432.378457] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.420s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.379093] env[62619]: DEBUG nova.objects.instance [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Lazy-loading 'resources' on Instance uuid f46de981-1f04-4baf-874c-de1b95d16f9d {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1432.381029] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873d5595-c644-431f-93cb-bb54a34da0db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.391900] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2aeb619-4d5a-4914-a9e2-ab40d6fe910f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.418435] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cead33-a78c-4be2-8cdf-1fa14d46f568 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.423103] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fb23eb-56ca-c023-3ac4-e1cc44c39f11, 'name': SearchDatastore_Task, 'duration_secs': 0.017413} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.423485] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.423734] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] b6aae13f-0711-4421-9d55-de7ece3e4b89/b6aae13f-0711-4421-9d55-de7ece3e4b89.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1432.424881] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1432.425121] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1432.426170] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-56359374-5430-4663-89fd-143705224ee8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.432020] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-387a5c19-4fab-497f-a4d9-911688701902 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.438140] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6e777f-eadc-4661-90b8-42d1bf56c652 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.444473] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1432.444473] env[62619]: value = "task-1777322" [ 1432.444473] env[62619]: _type = "Task" [ 1432.444473] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.481727] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1432.482239] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1432.486263] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179833MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1432.486263] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1432.486844] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9eaec1f-6886-49d4-bf15-996950ae0f47 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.493530] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777322, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.497471] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Waiting for the task: (returnval){ [ 1432.497471] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5280f214-f728-ce8b-717e-59627728fcb8" [ 1432.497471] env[62619]: _type = "Task" [ 1432.497471] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.507552] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5280f214-f728-ce8b-717e-59627728fcb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.535883] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777321, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.574498] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "a802534f-1766-4ea9-9188-803ef197d775" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1432.574750] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "a802534f-1766-4ea9-9188-803ef197d775" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.723185] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3136a15-b616-48aa-adda-27089aefdc87 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Lock "91ce0ab3-4fa4-4992-995a-0baeec91d9d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.289s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.740149] env[62619]: DEBUG nova.network.neutron [req-fe4f1a85-c746-45e5-b5e4-1607087c5085 req-53f13a43-755a-4434-8555-02e263c28687 service nova] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Updated VIF entry in instance network info cache for port 5ce94831-5f9c-4556-936d-e71ee8d04f45. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1432.740612] env[62619]: DEBUG nova.network.neutron [req-fe4f1a85-c746-45e5-b5e4-1607087c5085 req-53f13a43-755a-4434-8555-02e263c28687 service nova] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Updating instance_info_cache with network_info: [{"id": "5ce94831-5f9c-4556-936d-e71ee8d04f45", "address": "fa:16:3e:b6:c9:d0", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ce94831-5f", "ovs_interfaceid": "5ce94831-5f9c-4556-936d-e71ee8d04f45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.841434] env[62619]: DEBUG nova.network.neutron [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1432.882206] env[62619]: DEBUG nova.compute.utils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1432.884059] env[62619]: DEBUG nova.compute.manager [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1432.884059] env[62619]: DEBUG nova.network.neutron [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1432.958162] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777322, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.014828] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5280f214-f728-ce8b-717e-59627728fcb8, 'name': SearchDatastore_Task, 'duration_secs': 0.017306} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.015927] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc103fec-198d-4c18-afd8-d9a9f1dc4931 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.030072] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Waiting for the task: (returnval){ [ 1433.030072] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523b703a-2bf7-c6c9-7efb-9d5b01973fc0" [ 1433.030072] env[62619]: _type = "Task" [ 1433.030072] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.042484] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777321, 'name': CreateVM_Task, 'duration_secs': 0.679264} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.042484] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1433.042484] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.042484] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.042484] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1433.042484] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70bb51d2-178b-4669-a36c-7d0e1d276036 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.054624] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 
tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1433.054624] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524ab820-fcee-c44e-8788-d39793609ef7" [ 1433.054624] env[62619]: _type = "Task" [ 1433.054624] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.054977] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523b703a-2bf7-c6c9-7efb-9d5b01973fc0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.069596] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524ab820-fcee-c44e-8788-d39793609ef7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.097051] env[62619]: DEBUG nova.policy [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '26da2348a75d491b9494d62f389e56e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90501fd522094b02a04da8bc54edbcde', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1433.223103] env[62619]: DEBUG nova.compute.manager [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1433.239496] env[62619]: DEBUG nova.network.neutron [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating instance_info_cache with network_info: [{"id": "5530a947-e30a-4156-be0f-8e8dd90d2aef", "address": "fa:16:3e:96:ec:35", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5530a947-e3", "ovs_interfaceid": "5530a947-e30a-4156-be0f-8e8dd90d2aef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.247720] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe4f1a85-c746-45e5-b5e4-1607087c5085 req-53f13a43-755a-4434-8555-02e263c28687 service nova] Releasing lock "refresh_cache-6dbe4133-a6ba-4bba-9eb9-47a3d2691eec" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.381492] env[62619]: DEBUG nova.network.neutron [-] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.389431] env[62619]: DEBUG nova.compute.manager [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1433.438384] env[62619]: DEBUG nova.network.neutron [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Updating instance_info_cache with network_info: [{"id": "a0692e57-a205-4f22-b960-7d48b202c513", "address": "fa:16:3e:b5:f1:11", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.196", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0692e57-a2", "ovs_interfaceid": "a0692e57-a205-4f22-b960-7d48b202c513", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.457900] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777322, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.682093} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.459375] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] b6aae13f-0711-4421-9d55-de7ece3e4b89/b6aae13f-0711-4421-9d55-de7ece3e4b89.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1433.459601] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1433.460363] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e47b59-cf2b-4acd-a858-8f33f549769a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.462862] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-33601160-8d5c-422c-8883-c9492ea14203 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.472067] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ffcd9f-869a-4b46-baa6-57013663ef03 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.475668] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1433.475668] env[62619]: value = "task-1777323" [ 1433.475668] env[62619]: _type = "Task" [ 1433.475668] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.510594] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059ad01e-3ea0-4ce8-9b31-cfb56addd554 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.513546] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777323, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.519602] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7833a322-0101-4b16-8d7a-1b9af7a07f47 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.534419] env[62619]: DEBUG nova.compute.provider_tree [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1433.547234] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523b703a-2bf7-c6c9-7efb-9d5b01973fc0, 'name': SearchDatastore_Task, 'duration_secs': 0.050949} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.547503] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.548356] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e81c03f7-9c0e-46bd-9641-aced82038eca/e81c03f7-9c0e-46bd-9641-aced82038eca.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1433.548356] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28e5ea49-8f82-45e3-9d6e-447d31bd45c4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.558840] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Waiting for the task: (returnval){ [ 1433.558840] env[62619]: value = "task-1777324" [ 1433.558840] env[62619]: _type = "Task" [ 1433.558840] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.572402] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524ab820-fcee-c44e-8788-d39793609ef7, 'name': SearchDatastore_Task, 'duration_secs': 0.038807} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.575569] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.575791] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1433.576031] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.576174] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.576345] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1433.576604] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777324, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.576815] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35f5cf1f-7ae4-4dce-8225-4e531adfb073 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.594447] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1433.594661] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1433.595402] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c2d527d-79d7-429a-a94f-030fc3e25650 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.602250] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1433.602250] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c4a71c-044d-96f2-2da3-fed621bd8b7d" [ 1433.602250] env[62619]: _type = "Task" [ 1433.602250] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.611646] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c4a71c-044d-96f2-2da3-fed621bd8b7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.741356] env[62619]: DEBUG nova.network.neutron [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Successfully created port: 9463c863-5c42-4fc9-a0c8-a6c9de3bddcd {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1433.771131] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Releasing lock "refresh_cache-aa4906f1-e801-4df0-819e-8c5fb5930fb5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.771131] env[62619]: DEBUG nova.objects.instance [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lazy-loading 'migration_context' on Instance uuid aa4906f1-e801-4df0-819e-8c5fb5930fb5 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1433.771131] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.883715] env[62619]: INFO nova.compute.manager [-] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Took 1.62 seconds to deallocate network for instance. 
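The lock records around this point ("Acquiring lock ...", "Lock ... acquired ... waited 0.000s", "Releasing lock ...", "Lock ... 'released' ... held 2.170s") are oslo.concurrency's lockutils instrumentation around Nova's critical sections, here the resource tracker's "compute_resources" lock and the per-instance "refresh_cache-<uuid>" locks. A minimal sketch of that locking pattern follows; it assumes only that oslo.concurrency is installed, and the lock names and guarded functions are illustrative stand-ins, not Nova's actual code.

from oslo_concurrency import lockutils

# Decorator form: all callers serialize on the named lock, the way the
# resource tracker serializes instance_claim/update_usage on
# "compute_resources" in the log above. The lock name is illustrative.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    print('claiming resources for %s' % instance_uuid)

# Context-manager form, mirroring the "refresh_cache-<uuid>" acquire/release
# pairs in the log. Again, the lock name is only an example.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        print('refreshing network info cache for %s' % instance_uuid)

if __name__ == '__main__':
    claim_resources('00000000-0000-0000-0000-000000000000')
    refresh_cache('00000000-0000-0000-0000-000000000000')

The "waited"/"held" timings in the DEBUG lines are emitted by lockutils' own inner wrapper (the lockutils.py:402/407/421 frames in the log), so callers get that accounting without adding any timing code of their own.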
[ 1433.944024] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Releasing lock "refresh_cache-80363e16-5dd2-42ad-9ead-25b121d62211" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.944024] env[62619]: DEBUG nova.compute.manager [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Instance network_info: |[{"id": "a0692e57-a205-4f22-b960-7d48b202c513", "address": "fa:16:3e:b5:f1:11", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.196", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0692e57-a2", "ovs_interfaceid": "a0692e57-a205-4f22-b960-7d48b202c513", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1433.944370] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:f1:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a0692e57-a205-4f22-b960-7d48b202c513', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1433.952331] env[62619]: DEBUG oslo.service.loopingcall [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1433.953802] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1433.954223] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-438c6ade-c33c-4123-8b2a-bb6c443b8caa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.981757] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1433.981757] env[62619]: value = "task-1777325" [ 1433.981757] env[62619]: _type = "Task" [ 1433.981757] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.989372] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777323, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07632} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.990057] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1433.991375] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66a356a-6f31-4665-81d3-46fc2d12cbc4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.998539] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777325, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.024065] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] b6aae13f-0711-4421-9d55-de7ece3e4b89/b6aae13f-0711-4421-9d55-de7ece3e4b89.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1434.024065] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68b55f27-cd50-4fb6-a46f-b5bc760e49eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.041439] env[62619]: DEBUG nova.scheduler.client.report [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1434.048584] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1434.048584] env[62619]: value = "task-1777326" [ 1434.048584] env[62619]: _type = "Task" [ 1434.048584] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.059443] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777326, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.069561] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777324, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.084143] env[62619]: DEBUG nova.compute.manager [req-a0d8537a-b287-44b8-bef8-24d151087d39 req-5edfebac-7c88-4805-8ae5-21834b9aecd2 service nova] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Received event network-vif-plugged-a0692e57-a205-4f22-b960-7d48b202c513 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1434.084143] env[62619]: DEBUG oslo_concurrency.lockutils [req-a0d8537a-b287-44b8-bef8-24d151087d39 req-5edfebac-7c88-4805-8ae5-21834b9aecd2 service nova] Acquiring lock "80363e16-5dd2-42ad-9ead-25b121d62211-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.084143] env[62619]: DEBUG oslo_concurrency.lockutils [req-a0d8537a-b287-44b8-bef8-24d151087d39 req-5edfebac-7c88-4805-8ae5-21834b9aecd2 service nova] Lock "80363e16-5dd2-42ad-9ead-25b121d62211-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.084143] env[62619]: DEBUG oslo_concurrency.lockutils [req-a0d8537a-b287-44b8-bef8-24d151087d39 req-5edfebac-7c88-4805-8ae5-21834b9aecd2 service nova] Lock "80363e16-5dd2-42ad-9ead-25b121d62211-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.084739] env[62619]: DEBUG nova.compute.manager [req-a0d8537a-b287-44b8-bef8-24d151087d39 req-5edfebac-7c88-4805-8ae5-21834b9aecd2 service nova] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] No waiting events found dispatching network-vif-plugged-a0692e57-a205-4f22-b960-7d48b202c513 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1434.084739] env[62619]: WARNING nova.compute.manager [req-a0d8537a-b287-44b8-bef8-24d151087d39 req-5edfebac-7c88-4805-8ae5-21834b9aecd2 service nova] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Received unexpected event network-vif-plugged-a0692e57-a205-4f22-b960-7d48b202c513 for instance with vm_state building and task_state spawning. [ 1434.085069] env[62619]: DEBUG nova.compute.manager [req-a0d8537a-b287-44b8-bef8-24d151087d39 req-5edfebac-7c88-4805-8ae5-21834b9aecd2 service nova] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Received event network-changed-a0692e57-a205-4f22-b960-7d48b202c513 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1434.085202] env[62619]: DEBUG nova.compute.manager [req-a0d8537a-b287-44b8-bef8-24d151087d39 req-5edfebac-7c88-4805-8ae5-21834b9aecd2 service nova] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Refreshing instance network info cache due to event network-changed-a0692e57-a205-4f22-b960-7d48b202c513. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1434.085943] env[62619]: DEBUG oslo_concurrency.lockutils [req-a0d8537a-b287-44b8-bef8-24d151087d39 req-5edfebac-7c88-4805-8ae5-21834b9aecd2 service nova] Acquiring lock "refresh_cache-80363e16-5dd2-42ad-9ead-25b121d62211" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1434.085943] env[62619]: DEBUG oslo_concurrency.lockutils [req-a0d8537a-b287-44b8-bef8-24d151087d39 req-5edfebac-7c88-4805-8ae5-21834b9aecd2 service nova] Acquired lock "refresh_cache-80363e16-5dd2-42ad-9ead-25b121d62211" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1434.085943] env[62619]: DEBUG nova.network.neutron [req-a0d8537a-b287-44b8-bef8-24d151087d39 req-5edfebac-7c88-4805-8ae5-21834b9aecd2 service nova] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Refreshing network info cache for port a0692e57-a205-4f22-b960-7d48b202c513 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1434.117051] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c4a71c-044d-96f2-2da3-fed621bd8b7d, 'name': SearchDatastore_Task, 'duration_secs': 0.041799} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.117865] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0edbd57a-3a7c-4571-8dd9-c8d33fe97df9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.124751] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1434.124751] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525b8828-29bd-716a-9b6e-218454bf98f4" [ 1434.124751] env[62619]: _type = "Task" [ 1434.124751] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.137124] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525b8828-29bd-716a-9b6e-218454bf98f4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.254786] env[62619]: DEBUG nova.objects.base [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1434.256303] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1adf7915-5370-4fbf-a4d6-ff092e1dec32 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.287220] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c9782a2-42f4-4fb0-a7c3-47126c4f9d6f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.297322] env[62619]: DEBUG oslo_vmware.api [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1434.297322] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e80a4d-c409-f2bd-22d2-06ab363429c4" [ 1434.297322] env[62619]: _type = "Task" [ 1434.297322] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.307870] env[62619]: DEBUG oslo_vmware.api [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e80a4d-c409-f2bd-22d2-06ab363429c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.391361] env[62619]: DEBUG oslo_concurrency.lockutils [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.398803] env[62619]: DEBUG nova.compute.manager [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1434.431595] env[62619]: DEBUG nova.virt.hardware [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1434.432375] env[62619]: DEBUG nova.virt.hardware [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1434.432375] env[62619]: DEBUG nova.virt.hardware [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1434.432611] env[62619]: DEBUG nova.virt.hardware [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1434.432611] env[62619]: DEBUG nova.virt.hardware [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1434.432805] env[62619]: DEBUG nova.virt.hardware [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1434.433150] env[62619]: DEBUG nova.virt.hardware [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1434.433371] env[62619]: DEBUG nova.virt.hardware [None req-bbb1a824-14db-4013-a54e-047151535e69 
tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1434.433512] env[62619]: DEBUG nova.virt.hardware [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1434.433674] env[62619]: DEBUG nova.virt.hardware [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1434.433843] env[62619]: DEBUG nova.virt.hardware [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1434.434773] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e275faa0-5b22-4847-b0d2-fd02c2b75b9f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.444884] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a961253-49c8-4401-8df0-c8d7eb9a1882 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.496200] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777325, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.551168] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.170s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.552940] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.422s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.559285] env[62619]: INFO nova.compute.claims [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1434.575416] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777324, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.579602] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777326, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.580927] env[62619]: INFO nova.scheduler.client.report [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Deleted allocations for instance f46de981-1f04-4baf-874c-de1b95d16f9d [ 1434.637430] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525b8828-29bd-716a-9b6e-218454bf98f4, 'name': SearchDatastore_Task, 'duration_secs': 0.056226} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.640990] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.641394] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec/6dbe4133-a6ba-4bba-9eb9-47a3d2691eec.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1434.642483] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c1d9569-b588-4a54-89b0-ea0679958aa6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.652392] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1434.652392] env[62619]: value = "task-1777327" [ 1434.652392] env[62619]: _type = "Task" [ 1434.652392] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.673276] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777327, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.813256] env[62619]: DEBUG oslo_vmware.api [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e80a4d-c409-f2bd-22d2-06ab363429c4, 'name': SearchDatastore_Task, 'duration_secs': 0.036081} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.813654] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.953174] env[62619]: DEBUG nova.network.neutron [req-a0d8537a-b287-44b8-bef8-24d151087d39 req-5edfebac-7c88-4805-8ae5-21834b9aecd2 service nova] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Updated VIF entry in instance network info cache for port a0692e57-a205-4f22-b960-7d48b202c513. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1434.953578] env[62619]: DEBUG nova.network.neutron [req-a0d8537a-b287-44b8-bef8-24d151087d39 req-5edfebac-7c88-4805-8ae5-21834b9aecd2 service nova] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Updating instance_info_cache with network_info: [{"id": "a0692e57-a205-4f22-b960-7d48b202c513", "address": "fa:16:3e:b5:f1:11", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.196", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0692e57-a2", "ovs_interfaceid": "a0692e57-a205-4f22-b960-7d48b202c513", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.996899] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777325, 'name': CreateVM_Task, 'duration_secs': 0.549321} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.997102] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1434.997929] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1434.998101] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1434.998414] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1434.998671] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12495a1e-3535-4f0d-b330-24dd408f5e61 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.004802] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1435.004802] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52534e42-85ff-feb5-642e-08a1e6cc4049" [ 1435.004802] env[62619]: _type = "Task" [ 1435.004802] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.016896] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52534e42-85ff-feb5-642e-08a1e6cc4049, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.067146] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777326, 'name': ReconfigVM_Task, 'duration_secs': 0.812615} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.067146] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Reconfigured VM instance instance-00000019 to attach disk [datastore1] b6aae13f-0711-4421-9d55-de7ece3e4b89/b6aae13f-0711-4421-9d55-de7ece3e4b89.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1435.070684] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-15c957f1-e0bf-4f8d-acbd-3922aab103e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.079568] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777324, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.02227} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.081077] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e81c03f7-9c0e-46bd-9641-aced82038eca/e81c03f7-9c0e-46bd-9641-aced82038eca.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1435.081384] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1435.081672] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1435.081672] env[62619]: value = "task-1777328" [ 1435.081672] env[62619]: _type = "Task" [ 1435.081672] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.082157] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e15287c-c185-4d79-bc6c-db3d425022e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.091763] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ee9afe5-5bae-4f86-925a-20c085e68488 tempest-ServerAddressesTestJSON-834559229 tempest-ServerAddressesTestJSON-834559229-project-member] Lock "f46de981-1f04-4baf-874c-de1b95d16f9d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.382s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.095501] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Waiting for the task: (returnval){ [ 1435.095501] env[62619]: value = "task-1777329" [ 1435.095501] env[62619]: _type = "Task" [ 1435.095501] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.104469] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777328, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.114231] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777329, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.167210] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777327, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.456904] env[62619]: DEBUG oslo_concurrency.lockutils [req-a0d8537a-b287-44b8-bef8-24d151087d39 req-5edfebac-7c88-4805-8ae5-21834b9aecd2 service nova] Releasing lock "refresh_cache-80363e16-5dd2-42ad-9ead-25b121d62211" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1435.457228] env[62619]: DEBUG nova.compute.manager [req-a0d8537a-b287-44b8-bef8-24d151087d39 req-5edfebac-7c88-4805-8ae5-21834b9aecd2 service nova] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Received event network-vif-deleted-3e989184-0116-4b59-b0cb-45f895f69e47 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1435.519965] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52534e42-85ff-feb5-642e-08a1e6cc4049, 'name': SearchDatastore_Task, 'duration_secs': 0.049666} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.520279] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1435.520506] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1435.520736] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1435.520879] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.521071] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 
tempest-ListImageFiltersTestJSON-2130730250-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1435.521344] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e8002bd-5e11-4334-b591-3752d73ff50a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.535278] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1435.535467] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1435.536281] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4fc09d5-373c-46d4-8ed3-56f5912ec6c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.543083] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1435.543083] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52780e7b-c217-f519-0f83-a57d16b3d7b6" [ 1435.543083] env[62619]: _type = "Task" [ 1435.543083] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.551974] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52780e7b-c217-f519-0f83-a57d16b3d7b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.598316] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777328, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.605178] env[62619]: DEBUG nova.network.neutron [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Successfully updated port: 9463c863-5c42-4fc9-a0c8-a6c9de3bddcd {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1435.613153] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777329, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081427} completed successfully. 
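
The Acquiring/Acquired/Releasing lock lines around the image-cache vmdk are oslo.concurrency's lockutils, which keeps concurrent requests in one worker from manipulating the same cached image at once. A stripped-down sketch of the two forms visible in this log (the lock name below simply reuses the datastore path from the entries above as an example):

from oslo_concurrency import lockutils

CACHE_VMDK = ('[datastore1] devstack-image-cache_base/'
              '27a858d5-7985-4b17-8b01-50adcd8f566c/'
              '27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk')

# Context-manager form: logs "Acquiring lock" / "Acquired lock" /
# "Releasing lock" (lockutils.py lock, lines 310/313/331 in the entries above).
with lockutils.lock(CACHE_VMDK):
    pass  # copy or hard-link the cached image while the lock is held

# Decorator form: logs 'Lock "..." acquired by ... :: waited' / '"released" by'
# (lockutils.py inner, lines 402/407/421 above).
@lockutils.synchronized('compute_resources')
def update_usage():
    pass
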
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.613153] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1435.613612] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9900cedc-f8eb-4cde-af19-86bbb3bfa284 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.643084] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] e81c03f7-9c0e-46bd-9641-aced82038eca/e81c03f7-9c0e-46bd-9641-aced82038eca.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1435.648240] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7a9b20d-21ba-4c76-8ec5-d276bb901f46 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.679209] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777327, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.891714} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.679209] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Waiting for the task: (returnval){ [ 1435.679209] env[62619]: value = "task-1777330" [ 1435.679209] env[62619]: _type = "Task" [ 1435.679209] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.682807] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec/6dbe4133-a6ba-4bba-9eb9-47a3d2691eec.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1435.683477] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1435.693238] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1472bbc7-6401-4a0c-b958-60346647e854 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.710886] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777330, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.711257] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1435.711257] env[62619]: value = "task-1777331" [ 1435.711257] env[62619]: _type = "Task" [ 1435.711257] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.726433] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777331, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.057193] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52780e7b-c217-f519-0f83-a57d16b3d7b6, 'name': SearchDatastore_Task, 'duration_secs': 0.045162} completed successfully. 
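
The "Reconfiguring VM instance ... to attach disk ... with type sparse" entries correspond to a ReconfigVM_Task whose config spec adds a VirtualDisk device backed by the freshly copied .vmdk. A rough sketch of building such a spec with the suds factory the driver's session exposes; the controller key, unit number and device key are illustrative defaults, not values taken from this log, and real code also fills in capacity and thin-provisioning details.

def attach_vmdk(session, vm_ref, vmdk_path, controller_key=1000, unit=0):
    cf = session.vim.client.factory
    backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.fileName = vmdk_path
    backing.diskMode = 'persistent'

    disk = cf.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.controllerKey = controller_key
    disk.unitNumber = unit
    disk.key = -100                       # temporary key for a new device

    dev_change = cf.create('ns0:VirtualDeviceConfigSpec')
    dev_change.operation = 'add'
    dev_change.device = disk

    spec = cf.create('ns0:VirtualMachineConfigSpec')
    spec.deviceChange = [dev_change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
    session.wait_for_task(task)
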
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.057651] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f22c6925-a674-4e4e-b21e-d612cbbc802d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.066987] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1436.066987] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525c47f6-75a8-67c0-c860-5d2123ca1f3a" [ 1436.066987] env[62619]: _type = "Task" [ 1436.066987] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.075442] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525c47f6-75a8-67c0-c860-5d2123ca1f3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.096994] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777328, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.111921] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Acquiring lock "refresh_cache-7c058337-1684-4553-8e96-dd2cd1814a15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.112148] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Acquired lock "refresh_cache-7c058337-1684-4553-8e96-dd2cd1814a15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.112312] env[62619]: DEBUG nova.network.neutron [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1436.191802] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Acquiring lock "4374c102-a6fe-45ef-ad49-a1295f96899a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.192080] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 
tempest-ServerTagsTestJSON-2115815453-project-member] Lock "4374c102-a6fe-45ef-ad49-a1295f96899a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.192305] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Acquiring lock "4374c102-a6fe-45ef-ad49-a1295f96899a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.192487] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Lock "4374c102-a6fe-45ef-ad49-a1295f96899a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.193180] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Lock "4374c102-a6fe-45ef-ad49-a1295f96899a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.194720] env[62619]: INFO nova.compute.manager [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Terminating instance [ 1436.205041] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777330, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.216667] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec55eb3e-db5b-437e-b33f-8aba7bffe79a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.228833] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777331, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06976} completed successfully. 
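
The two locks taken just above, one named after the instance UUID and one with the "-events" suffix, show how the compute manager serializes a terminate per instance and clears that instance's queued external events under a separate lock. A deliberately simplified rendering of that shape, not the real ComputeManager code:

from oslo_concurrency import lockutils

def terminate_instance(instance_uuid):
    # Lock name == instance UUID, mirroring
    # ComputeManager.terminate_instance.<locals>.do_terminate_instance above.
    @lockutils.synchronized(instance_uuid)
    def do_terminate_instance():
        with lockutils.lock(instance_uuid + '-events'):
            pass  # drop any pending network-vif-* events for this instance
        # ... then power off, unregister and delete the instance's files ...
    do_terminate_instance()
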
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.230793] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1436.231926] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81bc2164-ddb3-491c-bce7-23a0cde48c22 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.235565] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29643e2d-6143-4419-beb3-83c79d049ac5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.262310] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec/6dbe4133-a6ba-4bba-9eb9-47a3d2691eec.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1436.263544] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4019cacf-9c2d-4cee-adfc-9b180d258bea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.308608] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc904e89-ed0c-43cd-9cc0-f4c86d4059bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.318350] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d78f30-f2ca-47ba-835f-323fe15c99c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.322665] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1436.322665] env[62619]: value = "task-1777332" [ 1436.322665] env[62619]: _type = "Task" [ 1436.322665] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.333974] env[62619]: DEBUG nova.compute.provider_tree [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1436.343380] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777332, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.579916] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525c47f6-75a8-67c0-c860-5d2123ca1f3a, 'name': SearchDatastore_Task, 'duration_secs': 0.031796} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.580473] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1436.581225] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 80363e16-5dd2-42ad-9ead-25b121d62211/80363e16-5dd2-42ad-9ead-25b121d62211.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1436.581754] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f48c9c74-7cec-438f-bd75-1b42d75574c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.593086] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1436.593086] env[62619]: value = "task-1777333" [ 1436.593086] env[62619]: _type = "Task" [ 1436.593086] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.603166] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777328, 'name': Rename_Task, 'duration_secs': 1.265631} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.603166] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1436.604018] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc2a19f4-9408-46cd-80f4-c331b9bb291c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.610046] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777333, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.618445] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1436.618445] env[62619]: value = "task-1777334" [ 1436.618445] env[62619]: _type = "Task" [ 1436.618445] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.636138] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777334, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.687046] env[62619]: DEBUG nova.network.neutron [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1436.701866] env[62619]: DEBUG nova.compute.manager [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1436.702553] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1436.707350] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3895b5c7-7ef8-4282-a0ca-050850ef75b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.710965] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777330, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.717587] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1436.718533] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2fb837f-1e81-441a-8724-cbc668f8aa14 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.731041] env[62619]: DEBUG oslo_vmware.api [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Waiting for the task: (returnval){ [ 1436.731041] env[62619]: value = "task-1777335" [ 1436.731041] env[62619]: _type = "Task" [ 1436.731041] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.743086] env[62619]: DEBUG oslo_vmware.api [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777335, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.790312] env[62619]: DEBUG nova.compute.manager [req-6778aff9-74e3-4852-9007-36748c374b3c req-0819139e-1640-492f-b000-2cf1b9101daf service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Received event network-vif-plugged-9463c863-5c42-4fc9-a0c8-a6c9de3bddcd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1436.790598] env[62619]: DEBUG oslo_concurrency.lockutils [req-6778aff9-74e3-4852-9007-36748c374b3c req-0819139e-1640-492f-b000-2cf1b9101daf service nova] Acquiring lock "7c058337-1684-4553-8e96-dd2cd1814a15-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.790925] env[62619]: DEBUG oslo_concurrency.lockutils [req-6778aff9-74e3-4852-9007-36748c374b3c req-0819139e-1640-492f-b000-2cf1b9101daf service nova] Lock "7c058337-1684-4553-8e96-dd2cd1814a15-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.791190] env[62619]: DEBUG oslo_concurrency.lockutils [req-6778aff9-74e3-4852-9007-36748c374b3c req-0819139e-1640-492f-b000-2cf1b9101daf service nova] Lock "7c058337-1684-4553-8e96-dd2cd1814a15-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.791488] env[62619]: DEBUG nova.compute.manager [req-6778aff9-74e3-4852-9007-36748c374b3c req-0819139e-1640-492f-b000-2cf1b9101daf service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] No waiting events found dispatching network-vif-plugged-9463c863-5c42-4fc9-a0c8-a6c9de3bddcd {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1436.791783] env[62619]: WARNING nova.compute.manager [req-6778aff9-74e3-4852-9007-36748c374b3c req-0819139e-1640-492f-b000-2cf1b9101daf service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Received unexpected event network-vif-plugged-9463c863-5c42-4fc9-a0c8-a6c9de3bddcd for instance with vm_state building and task_state spawning. [ 1436.792086] env[62619]: DEBUG nova.compute.manager [req-6778aff9-74e3-4852-9007-36748c374b3c req-0819139e-1640-492f-b000-2cf1b9101daf service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Received event network-changed-9463c863-5c42-4fc9-a0c8-a6c9de3bddcd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1436.792435] env[62619]: DEBUG nova.compute.manager [req-6778aff9-74e3-4852-9007-36748c374b3c req-0819139e-1640-492f-b000-2cf1b9101daf service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Refreshing instance network info cache due to event network-changed-9463c863-5c42-4fc9-a0c8-a6c9de3bddcd. 
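
The network-vif-plugged / network-changed traffic above is Neutron calling back into Nova's external-event API: the compute manager either hands the event to a waiter keyed by (event name, port id) or, as in the WARNING line, finds nobody waiting because the instance is still building. A deliberately simplified illustration of that dispatch table, not the actual InstanceEvents implementation:

import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}          # (event_name, tag) -> threading.Event
        self._lock = threading.Lock()

    def prepare(self, name, tag):
        ev = threading.Event()
        with self._lock:
            self._waiters[(name, tag)] = ev
        return ev

    def pop_event(self, name, tag):
        with self._lock:
            ev = self._waiters.pop((name, tag), None)
        if ev is None:
            # Matches the WARNING above: the event arrived with nobody waiting.
            return False
        ev.set()
        return True
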
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1436.792641] env[62619]: DEBUG oslo_concurrency.lockutils [req-6778aff9-74e3-4852-9007-36748c374b3c req-0819139e-1640-492f-b000-2cf1b9101daf service nova] Acquiring lock "refresh_cache-7c058337-1684-4553-8e96-dd2cd1814a15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.839190] env[62619]: DEBUG nova.scheduler.client.report [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1436.848861] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777332, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.071743] env[62619]: DEBUG nova.network.neutron [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Updating instance_info_cache with network_info: [{"id": "9463c863-5c42-4fc9-a0c8-a6c9de3bddcd", "address": "fa:16:3e:31:14:da", "network": {"id": "f2a947a0-6efb-4fd5-9aa4-f9604a752455", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2058354215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90501fd522094b02a04da8bc54edbcde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9463c863-5c", "ovs_interfaceid": "9463c863-5c42-4fc9-a0c8-a6c9de3bddcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.107826] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777333, 'name': CopyVirtualDisk_Task} progress is 77%. 
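
The inventory dict reported to Placement just above determines what the scheduler may pack onto this node: usable capacity per resource class is (total - reserved) * allocation_ratio. Plugging in the logged values:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
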
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.118703] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "ca5f5f6b-5303-4af4-adaa-e4aac72a90f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.118888] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "ca5f5f6b-5303-4af4-adaa-e4aac72a90f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.131865] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777334, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.156473] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "78c7a111-d497-4114-b4f4-07319e6e7df2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.156719] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "78c7a111-d497-4114-b4f4-07319e6e7df2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.200413] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777330, 'name': ReconfigVM_Task, 'duration_secs': 1.032691} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.200827] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Reconfigured VM instance instance-00000014 to attach disk [datastore1] e81c03f7-9c0e-46bd-9641-aced82038eca/e81c03f7-9c0e-46bd-9641-aced82038eca.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1437.201838] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-936ffb77-5b95-4b7c-802a-81e94a8e42fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.211520] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Waiting for the task: (returnval){ [ 1437.211520] env[62619]: value = "task-1777336" [ 1437.211520] env[62619]: _type = "Task" [ 1437.211520] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.220228] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777336, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.241520] env[62619]: DEBUG oslo_vmware.api [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777335, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.340022] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777332, 'name': ReconfigVM_Task, 'duration_secs': 0.530419} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.340022] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec/6dbe4133-a6ba-4bba-9eb9-47a3d2691eec.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1437.340022] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce142bf7-6456-4fb8-b2a7-47f4e11b4dec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.349148] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1437.349148] env[62619]: value = "task-1777337" [ 1437.349148] env[62619]: _type = "Task" [ 1437.349148] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.350019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.797s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.350557] env[62619]: DEBUG nova.compute.manager [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1437.358239] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.834s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.358525] env[62619]: DEBUG nova.objects.instance [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lazy-loading 'resources' on Instance uuid d7b2d831-b2ae-445c-887b-290171ae5d80 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1437.370560] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777337, 'name': Rename_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.576846] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Releasing lock "refresh_cache-7c058337-1684-4553-8e96-dd2cd1814a15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.576846] env[62619]: DEBUG nova.compute.manager [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Instance network_info: |[{"id": "9463c863-5c42-4fc9-a0c8-a6c9de3bddcd", "address": "fa:16:3e:31:14:da", "network": {"id": "f2a947a0-6efb-4fd5-9aa4-f9604a752455", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2058354215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90501fd522094b02a04da8bc54edbcde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9463c863-5c", "ovs_interfaceid": "9463c863-5c42-4fc9-a0c8-a6c9de3bddcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1437.577123] env[62619]: DEBUG oslo_concurrency.lockutils [req-6778aff9-74e3-4852-9007-36748c374b3c req-0819139e-1640-492f-b000-2cf1b9101daf service nova] Acquired lock "refresh_cache-7c058337-1684-4553-8e96-dd2cd1814a15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.577123] env[62619]: DEBUG nova.network.neutron [req-6778aff9-74e3-4852-9007-36748c374b3c req-0819139e-1640-492f-b000-2cf1b9101daf service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Refreshing network info cache for port 9463c863-5c42-4fc9-a0c8-a6c9de3bddcd {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1437.578211] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:14:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec46b14d-3310-4f2b-96c1-f53ee47d3759', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9463c863-5c42-4fc9-a0c8-a6c9de3bddcd', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1437.586325] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Creating folder: Project (90501fd522094b02a04da8bc54edbcde). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1437.589748] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83d573de-da36-48ac-a801-70db40820bbb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.605410] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777333, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533644} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.606726] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 80363e16-5dd2-42ad-9ead-25b121d62211/80363e16-5dd2-42ad-9ead-25b121d62211.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1437.606941] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1437.607318] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Created folder: Project (90501fd522094b02a04da8bc54edbcde) in parent group-v368875. [ 1437.607515] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Creating folder: Instances. Parent ref: group-v368951. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1437.607805] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c46dd24-e3e7-4ab6-a577-7ae5fd814230 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.609923] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd763583-f087-4251-b30d-e95a37197b78 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.618230] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1437.618230] env[62619]: value = "task-1777340" [ 1437.618230] env[62619]: _type = "Task" [ 1437.618230] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.622935] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Created folder: Instances in parent group-v368951. [ 1437.623261] env[62619]: DEBUG oslo.service.loopingcall [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1437.626748] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1437.627547] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a0689ae-097c-48ca-be3d-1fabd85c45be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.648076] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777340, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.654759] env[62619]: DEBUG oslo_vmware.api [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777334, 'name': PowerOnVM_Task, 'duration_secs': 0.721295} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.655041] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1437.655251] env[62619]: INFO nova.compute.manager [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Took 13.20 seconds to spawn the instance on the hypervisor. [ 1437.655459] env[62619]: DEBUG nova.compute.manager [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1437.656294] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c843290d-5dcf-4ffb-99b6-c6b955470722 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.677393] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1437.677393] env[62619]: value = "task-1777341" [ 1437.677393] env[62619]: _type = "Task" [ 1437.677393] env[62619]: } to complete. 
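
The folder bookkeeping and CreateVM_Task invocation above ("Created folder: Project (...)", "Created folder: Instances", "Creating VM on the ESX host") follow the usual vSphere pattern: create the tenant and Instances folders, then ask the Instances folder to create the VM. A compressed sketch in the same invoke/wait style; the config spec and resource-pool reference are treated as given, and the duplicate-name handling Nova performs when a folder already exists is omitted.

def create_instance_vm(session, vim, parent_folder, project_name,
                       config_spec, res_pool_ref):
    # CreateFolder returns the folder ref directly (it is not a *_Task method).
    project_folder = session.invoke_api(vim, 'CreateFolder', parent_folder,
                                        name=project_name)
    instances_folder = session.invoke_api(vim, 'CreateFolder', project_folder,
                                          name='Instances')
    task = session.invoke_api(vim, 'CreateVM_Task', instances_folder,
                              config=config_spec, pool=res_pool_ref)
    return session.wait_for_task(task)
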
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.690823] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777341, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.722321] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777336, 'name': Rename_Task, 'duration_secs': 0.218526} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.725027] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1437.725640] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba462fe3-9c5c-49be-be91-81bd3f6ce561 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.738143] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Waiting for the task: (returnval){ [ 1437.738143] env[62619]: value = "task-1777342" [ 1437.738143] env[62619]: _type = "Task" [ 1437.738143] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.745456] env[62619]: DEBUG oslo_vmware.api [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777335, 'name': PowerOffVM_Task, 'duration_secs': 0.681935} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.746932] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1437.747042] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1437.747341] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4699edad-1fb2-498e-b029-793e5ac824a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.755307] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777342, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.860318] env[62619]: DEBUG nova.compute.utils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1437.864592] env[62619]: DEBUG nova.compute.manager [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1437.864592] env[62619]: DEBUG nova.network.neutron [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1437.877540] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777337, 'name': Rename_Task, 'duration_secs': 0.356547} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.877540] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1437.877540] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1437.877842] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Deleting the datastore file [datastore1] 4374c102-a6fe-45ef-ad49-a1295f96899a {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1437.878950] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1437.879200] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64001fbe-8cf3-40f2-86cf-4b67cf77e0b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.881136] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1650b3b-805a-404c-850f-39066cc93653 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.888642] env[62619]: DEBUG 
oslo_vmware.api [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Waiting for the task: (returnval){ [ 1437.888642] env[62619]: value = "task-1777344" [ 1437.888642] env[62619]: _type = "Task" [ 1437.888642] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.890409] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1437.890409] env[62619]: value = "task-1777345" [ 1437.890409] env[62619]: _type = "Task" [ 1437.890409] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.907151] env[62619]: DEBUG oslo_vmware.api [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777344, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.912138] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777345, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.967642] env[62619]: DEBUG nova.policy [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '337abea0d4414c9895156256801a3629', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8311e2dca4814727b91967833796fc66', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1438.075019] env[62619]: DEBUG nova.network.neutron [req-6778aff9-74e3-4852-9007-36748c374b3c req-0819139e-1640-492f-b000-2cf1b9101daf service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Updated VIF entry in instance network info cache for port 9463c863-5c42-4fc9-a0c8-a6c9de3bddcd. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1438.075369] env[62619]: DEBUG nova.network.neutron [req-6778aff9-74e3-4852-9007-36748c374b3c req-0819139e-1640-492f-b000-2cf1b9101daf service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Updating instance_info_cache with network_info: [{"id": "9463c863-5c42-4fc9-a0c8-a6c9de3bddcd", "address": "fa:16:3e:31:14:da", "network": {"id": "f2a947a0-6efb-4fd5-9aa4-f9604a752455", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2058354215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90501fd522094b02a04da8bc54edbcde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9463c863-5c", "ovs_interfaceid": "9463c863-5c42-4fc9-a0c8-a6c9de3bddcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.133034] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777340, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070485} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.133339] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1438.134162] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f89eef-1309-40a8-a27e-c044b8040960 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.159739] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 80363e16-5dd2-42ad-9ead-25b121d62211/80363e16-5dd2-42ad-9ead-25b121d62211.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1438.160812] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16a1d01f-6dc6-497e-ab31-0819d958ff6d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.189029] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1438.189029] env[62619]: value = "task-1777346" [ 1438.189029] env[62619]: _type = "Task" [ 1438.189029] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.195403] env[62619]: INFO nova.compute.manager [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Took 46.79 seconds to build instance. [ 1438.203786] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777341, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.210231] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777346, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.256813] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777342, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.372668] env[62619]: DEBUG nova.compute.manager [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1438.415266] env[62619]: DEBUG oslo_vmware.api [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Task: {'id': task-1777344, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.41314} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.416350] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1438.417116] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1438.417116] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1438.417219] env[62619]: INFO nova.compute.manager [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Took 1.71 seconds to destroy the instance on the hypervisor. [ 1438.417472] env[62619]: DEBUG oslo.service.loopingcall [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1438.422238] env[62619]: DEBUG nova.compute.manager [-] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1438.422238] env[62619]: DEBUG nova.network.neutron [-] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1438.425858] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777345, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.530432] env[62619]: DEBUG nova.network.neutron [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Successfully created port: 467d9eda-baf0-4ee1-b652-587781aeb8e3 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1438.545595] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a24cf6-f4c7-4add-a906-4f7f415d3bfb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.554504] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce65abd-1ff2-4918-9ee4-e89c11d188d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.591952] env[62619]: DEBUG oslo_concurrency.lockutils [req-6778aff9-74e3-4852-9007-36748c374b3c req-0819139e-1640-492f-b000-2cf1b9101daf service nova] Releasing lock "refresh_cache-7c058337-1684-4553-8e96-dd2cd1814a15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1438.593168] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1644ed8a-bad8-4526-b452-7be852ae38e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.601597] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980ce73d-c07b-4158-9015-91102f53fb28 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.615930] env[62619]: DEBUG nova.compute.provider_tree [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1438.691580] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777341, 'name': CreateVM_Task, 'duration_secs': 0.752975} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.694836] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1438.695712] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.695882] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.696426] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1438.697317] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bd20380-6e9b-4b4c-83fd-48383cf41518 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.703373] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777346, 'name': ReconfigVM_Task, 'duration_secs': 0.308395} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.704356] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 80363e16-5dd2-42ad-9ead-25b121d62211/80363e16-5dd2-42ad-9ead-25b121d62211.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1438.705185] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee0d0746-5ff8-4dc4-892a-f585c6e22c91 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "b6aae13f-0711-4421-9d55-de7ece3e4b89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.713s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.705416] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b41ff2be-9bed-4706-a944-3643f5808ef4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.709432] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Waiting for the task: (returnval){ [ 1438.709432] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528429a4-e28e-2432-fbc1-24e799c5c095" [ 1438.709432] env[62619]: _type = "Task" [ 1438.709432] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.714784] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1438.714784] env[62619]: value = "task-1777347" [ 1438.714784] env[62619]: _type = "Task" [ 1438.714784] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.722491] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528429a4-e28e-2432-fbc1-24e799c5c095, 'name': SearchDatastore_Task, 'duration_secs': 0.010613} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.723315] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1438.723565] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1438.723794] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.723982] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.724161] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1438.727604] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09f01d43-ea03-4920-9f69-059dc1eb4134 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.729439] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777347, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.736575] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1438.736750] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1438.737556] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e4f6acf-37a7-40d5-809e-a6098425c7e8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.746016] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Waiting for the task: (returnval){ [ 1438.746016] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52664e31-cad4-c967-fd03-adc162b823b9" [ 1438.746016] env[62619]: _type = "Task" [ 1438.746016] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.749827] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777342, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.758292] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52664e31-cad4-c967-fd03-adc162b823b9, 'name': SearchDatastore_Task, 'duration_secs': 0.00898} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.759047] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b336f198-2b52-4242-8fa5-21d1c94621e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.765234] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Waiting for the task: (returnval){ [ 1438.765234] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52712c88-8a01-4724-a8d1-bad3b1b9d30d" [ 1438.765234] env[62619]: _type = "Task" [ 1438.765234] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.772282] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52712c88-8a01-4724-a8d1-bad3b1b9d30d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.911222] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777345, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.917200] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Acquiring lock "91ce0ab3-4fa4-4992-995a-0baeec91d9d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.917350] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Lock "91ce0ab3-4fa4-4992-995a-0baeec91d9d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.917630] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Acquiring lock "91ce0ab3-4fa4-4992-995a-0baeec91d9d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.917763] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Lock "91ce0ab3-4fa4-4992-995a-0baeec91d9d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.917927] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Lock "91ce0ab3-4fa4-4992-995a-0baeec91d9d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.921089] env[62619]: INFO nova.compute.manager [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Terminating instance [ 1439.119612] env[62619]: DEBUG 
nova.scheduler.client.report [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1439.212802] env[62619]: DEBUG nova.network.neutron [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Successfully created port: fc867071-21de-47e5-b245-cdd32ff75559 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1439.215921] env[62619]: DEBUG nova.compute.manager [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1439.228084] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777347, 'name': Rename_Task, 'duration_secs': 0.152555} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.228356] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1439.228602] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f742d81a-ab11-45c4-9fd6-f7ec93dde755 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.237067] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1439.237067] env[62619]: value = "task-1777348" [ 1439.237067] env[62619]: _type = "Task" [ 1439.237067] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.242751] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777348, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.251889] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777342, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.279621] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52712c88-8a01-4724-a8d1-bad3b1b9d30d, 'name': SearchDatastore_Task, 'duration_secs': 0.00891} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.280978] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.280978] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 7c058337-1684-4553-8e96-dd2cd1814a15/7c058337-1684-4553-8e96-dd2cd1814a15.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1439.280978] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cbf4192-404f-47d2-b879-160c644405de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.289263] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Waiting for the task: (returnval){ [ 1439.289263] env[62619]: value = "task-1777349" [ 1439.289263] env[62619]: _type = "Task" [ 1439.289263] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.301366] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777349, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.318870] env[62619]: DEBUG nova.compute.manager [req-0a7cf78c-399f-4369-8483-250fac4daac7 req-9dd11617-d9c7-4be5-8740-d49932ac8374 service nova] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Received event network-vif-deleted-8f4508cc-e861-44f6-82c8-f82bf1da6ef5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1439.318942] env[62619]: INFO nova.compute.manager [req-0a7cf78c-399f-4369-8483-250fac4daac7 req-9dd11617-d9c7-4be5-8740-d49932ac8374 service nova] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Neutron deleted interface 8f4508cc-e861-44f6-82c8-f82bf1da6ef5; detaching it from the instance and deleting it from the info cache [ 1439.320802] env[62619]: DEBUG nova.network.neutron [req-0a7cf78c-399f-4369-8483-250fac4daac7 req-9dd11617-d9c7-4be5-8740-d49932ac8374 service nova] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1439.386684] env[62619]: DEBUG nova.compute.manager [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1439.418347] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777345, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.421388] env[62619]: DEBUG nova.virt.hardware [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1439.421641] env[62619]: DEBUG nova.virt.hardware [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1439.421998] env[62619]: DEBUG nova.virt.hardware [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
1439.422321] env[62619]: DEBUG nova.virt.hardware [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1439.422485] env[62619]: DEBUG nova.virt.hardware [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1439.422634] env[62619]: DEBUG nova.virt.hardware [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1439.422846] env[62619]: DEBUG nova.virt.hardware [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1439.423173] env[62619]: DEBUG nova.virt.hardware [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1439.423343] env[62619]: DEBUG nova.virt.hardware [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1439.423857] env[62619]: DEBUG nova.virt.hardware [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1439.424092] env[62619]: DEBUG nova.virt.hardware [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1439.425134] env[62619]: DEBUG nova.compute.manager [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1439.425352] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1439.427027] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e71b2d1-cfa4-4931-aa9f-7880de77277f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.431556] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf337d2-abaa-4da2-a244-4bb3608fee15 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.445525] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec0c988-19b5-424c-a866-6e7bc54708e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.451013] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1439.451296] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fd591f2-21a4-4f50-a31e-05cf53113add {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.470076] env[62619]: DEBUG oslo_vmware.api [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Waiting for the task: (returnval){ [ 1439.470076] env[62619]: value = "task-1777350" [ 1439.470076] env[62619]: _type = "Task" [ 1439.470076] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.479363] env[62619]: DEBUG oslo_vmware.api [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777350, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.546450] env[62619]: DEBUG nova.network.neutron [-] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1439.627109] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.267s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1439.628969] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.301s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1439.631645] env[62619]: INFO nova.compute.claims [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1439.655092] env[62619]: INFO nova.scheduler.client.report [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Deleted allocations for instance d7b2d831-b2ae-445c-887b-290171ae5d80 [ 1439.752915] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777348, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.754531] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1439.763189] env[62619]: DEBUG oslo_vmware.api [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Task: {'id': task-1777342, 'name': PowerOnVM_Task, 'duration_secs': 1.663011} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.763468] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1439.763750] env[62619]: DEBUG nova.compute.manager [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1439.764635] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20119991-17b7-4851-bc88-a9ab93b82647 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.803631] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777349, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.823578] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7af0561a-6b4e-4b3b-b2a1-7c8f9409b75d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.837227] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f6aae6-baa4-4941-b88a-11310d598fce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.862260] env[62619]: DEBUG nova.network.neutron [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Successfully created port: bfe09361-c134-4a13-a07f-d903524c0546 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1439.878625] env[62619]: DEBUG nova.compute.manager [req-0a7cf78c-399f-4369-8483-250fac4daac7 req-9dd11617-d9c7-4be5-8740-d49932ac8374 service nova] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Detach interface failed, port_id=8f4508cc-e861-44f6-82c8-f82bf1da6ef5, reason: Instance 4374c102-a6fe-45ef-ad49-a1295f96899a could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1439.909429] env[62619]: DEBUG oslo_vmware.api [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777345, 'name': PowerOnVM_Task, 'duration_secs': 1.551586} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.909621] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1439.909865] env[62619]: INFO nova.compute.manager [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Took 13.29 seconds to spawn the instance on the hypervisor. [ 1439.910110] env[62619]: DEBUG nova.compute.manager [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1439.911025] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1058a22-3c11-4a63-9466-d1b1a5e9d34e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.984358] env[62619]: DEBUG oslo_vmware.api [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777350, 'name': PowerOffVM_Task, 'duration_secs': 0.307306} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.985296] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1439.985472] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1439.985740] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93100a96-a158-44b3-9047-a9b0c36c5475 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.049227] env[62619]: INFO nova.compute.manager [-] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Took 1.63 seconds to deallocate network for instance. 
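The repeated "Task: {...} progress is N%" and "completed successfully" entries above are produced by a poll-until-done loop around each vSphere task. A minimal sketch of that pattern in plain Python, assuming a hypothetical get_task_info callable in place of the real vSphere bindings (an illustration of the pattern, not the oslo.vmware implementation):

    import time

    class TaskFailed(Exception):
        """Raised when the hypervisor reports the task as failed."""

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        # get_task_info is a hypothetical callable returning a dict such as
        # {'state': 'running', 'progress': 66} or {'state': 'success'}.
        while True:
            info = get_task_info(task_ref)
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            # Still queued or running: sleep and poll again, which is what
            # each periodic "_poll_task ... progress is N%" entry records.
            time.sleep(interval)

Each PowerOnVM_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task above goes through the same loop, and the 'duration_secs' reported on completion corresponds roughly to the wall-clock time spent polling.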
[ 1440.086615] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1440.086880] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1440.087027] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Deleting the datastore file [datastore1] 91ce0ab3-4fa4-4992-995a-0baeec91d9d0 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1440.087327] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14117c9c-aaed-4dbe-a1e7-c3f97452d735 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.096465] env[62619]: DEBUG oslo_vmware.api [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Waiting for the task: (returnval){ [ 1440.096465] env[62619]: value = "task-1777352" [ 1440.096465] env[62619]: _type = "Task" [ 1440.096465] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.107365] env[62619]: DEBUG oslo_vmware.api [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777352, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.169295] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9478acab-c4a2-446a-80fd-b2a43c23e4f6 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "d7b2d831-b2ae-445c-887b-290171ae5d80" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.817s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.244545] env[62619]: DEBUG oslo_vmware.api [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777348, 'name': PowerOnVM_Task, 'duration_secs': 0.716474} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.244837] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1440.245048] env[62619]: INFO nova.compute.manager [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Took 10.85 seconds to spawn the instance on the hypervisor. [ 1440.245228] env[62619]: DEBUG nova.compute.manager [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1440.246045] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78497d6-868f-4375-b6c1-017443336f76 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.291152] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.301669] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777349, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571773} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.302647] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 7c058337-1684-4553-8e96-dd2cd1814a15/7c058337-1684-4553-8e96-dd2cd1814a15.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1440.302860] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1440.303123] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e7b7163-8bca-4655-a4f5-1cba98344188 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.309749] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Waiting for the task: (returnval){ [ 1440.309749] env[62619]: value = "task-1777353" [ 1440.309749] env[62619]: _type = "Task" [ 1440.309749] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.324561] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777353, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.440869] env[62619]: INFO nova.compute.manager [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Took 47.64 seconds to build instance. 
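Annotation: the DeleteDatastoreFile_Task / CopyVirtualDisk_Task / ExtendVirtualDisk_Task entries above all follow the same shape: a vCenter task handle (e.g. "task-1777352") is returned immediately, and the API layer then polls it, logging "progress is N%" until it reports "completed successfully" with a duration. The stand-alone Python sketch below mirrors only that observable polling loop; FakeTask and wait_for_task here are invented for illustration and are not the oslo.vmware implementation.

import time

class FakeTask:
    """Simulates a vCenter task that advances a bit on every poll."""
    def __init__(self, task_id, name):
        self.task_id = task_id
        self.name = name
        self.progress = 0

    def poll(self):
        self.progress = min(100, self.progress + 40)
        return self.progress


def wait_for_task(task, interval=0.5):
    """Poll `task` until it completes, mimicking the log's _poll_task loop."""
    start = time.monotonic()
    while True:
        progress = task.poll()
        if progress >= 100:
            duration = time.monotonic() - start
            print(f"Task: {{'id': {task.task_id!r}, 'name': {task.name!r}, "
                  f"'duration_secs': {duration:.6f}}} completed successfully.")
            return
        print(f"Task: {{'id': {task.task_id!r}, 'name': {task.name!r}}} "
              f"progress is {progress}%.")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("task-1777352", "DeleteDatastoreFile_Task"))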
[ 1440.542122] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "cd8b8828-79cf-4a7c-b018-b8bd745aaa45" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.542395] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "cd8b8828-79cf-4a7c-b018-b8bd745aaa45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.556031] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.607024] env[62619]: DEBUG oslo_vmware.api [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Task: {'id': task-1777352, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137502} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.607317] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1440.607508] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1440.607683] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1440.607852] env[62619]: INFO nova.compute.manager [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1440.608218] env[62619]: DEBUG oslo.service.loopingcall [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1440.608600] env[62619]: DEBUG nova.compute.manager [-] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1440.608707] env[62619]: DEBUG nova.network.neutron [-] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1440.765634] env[62619]: INFO nova.compute.manager [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Took 46.83 seconds to build instance. [ 1440.818780] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777353, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069272} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.821416] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1440.824910] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d3e7fb-2602-4607-b9a2-87009c48768c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.856169] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 7c058337-1684-4553-8e96-dd2cd1814a15/7c058337-1684-4553-8e96-dd2cd1814a15.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1440.859050] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f7f8721-a8bd-47db-962c-57985e609640 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.879463] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Waiting for the task: (returnval){ [ 1440.879463] env[62619]: value = "task-1777354" [ 1440.879463] env[62619]: _type = "Task" [ 1440.879463] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.887688] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777354, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.948540] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2516e5eb-8151-4607-98d2-37a72068030c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "6dbe4133-a6ba-4bba-9eb9-47a3d2691eec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.272s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.275194] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6c3e6fb2-36a8-4b17-8889-18c38bae304b tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "80363e16-5dd2-42ad-9ead-25b121d62211" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.830s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.290923] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891a4c38-12b5-4996-9bf2-07abf79609c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.299837] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2091f7a5-5465-4121-a34b-c007f61652dd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.339265] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2f9940-fcfc-4579-aa6c-4abf20446727 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.347607] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7911269a-8143-482b-8d89-3b76cfd552ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.363305] env[62619]: DEBUG nova.compute.provider_tree [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1441.391338] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777354, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.458498] env[62619]: DEBUG nova.compute.manager [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1441.575535] env[62619]: DEBUG nova.compute.manager [req-dbbbff9a-a26d-48c0-ab57-cecc6ad67836 req-8604065f-c555-490a-a622-72ebf5279d64 service nova] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Received event network-vif-deleted-19b98734-638c-48c1-aa4b-58c310858ab7 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1441.575737] env[62619]: INFO nova.compute.manager [req-dbbbff9a-a26d-48c0-ab57-cecc6ad67836 req-8604065f-c555-490a-a622-72ebf5279d64 service nova] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Neutron deleted interface 19b98734-638c-48c1-aa4b-58c310858ab7; detaching it from the instance and deleting it from the info cache [ 1441.575906] env[62619]: DEBUG nova.network.neutron [req-dbbbff9a-a26d-48c0-ab57-cecc6ad67836 req-8604065f-c555-490a-a622-72ebf5279d64 service nova] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.683759] env[62619]: DEBUG nova.network.neutron [-] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.781143] env[62619]: DEBUG nova.compute.manager [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1441.866750] env[62619]: DEBUG nova.scheduler.client.report [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1441.896093] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777354, 'name': ReconfigVM_Task, 'duration_secs': 0.719672} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.896387] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 7c058337-1684-4553-8e96-dd2cd1814a15/7c058337-1684-4553-8e96-dd2cd1814a15.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1441.897024] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd3dd88d-1bf7-426a-961f-206a2bd3acef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.908265] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Waiting for the task: (returnval){ [ 1441.908265] env[62619]: value = "task-1777355" [ 1441.908265] env[62619]: _type = "Task" [ 1441.908265] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.921509] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777355, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.994702] env[62619]: DEBUG nova.network.neutron [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Successfully updated port: 467d9eda-baf0-4ee1-b652-587781aeb8e3 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1441.998186] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.001134] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquiring lock "e81c03f7-9c0e-46bd-9641-aced82038eca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.002384] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Lock "e81c03f7-9c0e-46bd-9641-aced82038eca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.002384] env[62619]: DEBUG 
oslo_concurrency.lockutils [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquiring lock "e81c03f7-9c0e-46bd-9641-aced82038eca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.002384] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Lock "e81c03f7-9c0e-46bd-9641-aced82038eca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.002384] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Lock "e81c03f7-9c0e-46bd-9641-aced82038eca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.004422] env[62619]: INFO nova.compute.manager [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Terminating instance [ 1442.079371] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-893d2a98-b84f-4a88-8a92-029bd78fd6e8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.089477] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9f54f9-eecd-4c95-a29b-0f94af5bcb35 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.124072] env[62619]: DEBUG nova.compute.manager [req-dbbbff9a-a26d-48c0-ab57-cecc6ad67836 req-8604065f-c555-490a-a622-72ebf5279d64 service nova] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Detach interface failed, port_id=19b98734-638c-48c1-aa4b-58c310858ab7, reason: Instance 91ce0ab3-4fa4-4992-995a-0baeec91d9d0 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1442.186551] env[62619]: INFO nova.compute.manager [-] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Took 1.58 seconds to deallocate network for instance. 
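Annotation: the destroy path above ends with network deallocation driven by a looping call ("Waiting for function ... _deallocate_network_with_retries to return", later "Took 1.58 seconds to deallocate network for instance"). The sketch below approximates only that retry shape with stdlib code; make_flaky_deallocate, deallocate_with_retries, and TransientNetworkError are hypothetical names for the example, not Nova's or oslo.service's actual API.

import time

class TransientNetworkError(Exception):
    """Stand-in for a temporary failure talking to the network service."""


def make_flaky_deallocate(fail_times):
    """Return a fake deallocation call that fails `fail_times` times first."""
    remaining = {"fails": fail_times}

    def deallocate(instance_uuid):
        if remaining["fails"] > 0:
            remaining["fails"] -= 1
            raise TransientNetworkError("network service temporarily unavailable")
        print(f"[instance: {instance_uuid}] deallocate_for_instance() done")

    return deallocate


def deallocate_with_retries(deallocate, instance_uuid, max_attempts=5, base_delay=0.2):
    """Call `deallocate` until it succeeds, backing off between attempts."""
    start = time.monotonic()
    for attempt in range(1, max_attempts + 1):
        try:
            deallocate(instance_uuid)
            break
        except TransientNetworkError as exc:
            if attempt == max_attempts:
                raise
            delay = base_delay * attempt
            print(f"attempt {attempt} failed ({exc}); retrying in {delay:.1f}s")
            time.sleep(delay)
    print(f"Took {time.monotonic() - start:.2f} seconds to deallocate "
          f"network for instance.")


if __name__ == "__main__":
    deallocate_with_retries(make_flaky_deallocate(2),
                            "91ce0ab3-4fa4-4992-995a-0baeec91d9d0")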
[ 1442.303363] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.372157] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.743s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.372798] env[62619]: DEBUG nova.compute.manager [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1442.376754] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.510s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.377411] env[62619]: INFO nova.compute.claims [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1442.432022] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777355, 'name': Rename_Task, 'duration_secs': 0.339662} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.432022] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1442.432022] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-296d623d-30aa-4539-a617-74921294aa37 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.440710] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Waiting for the task: (returnval){ [ 1442.440710] env[62619]: value = "task-1777356" [ 1442.440710] env[62619]: _type = "Task" [ 1442.440710] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.450915] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777356, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.508161] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquiring lock "refresh_cache-e81c03f7-9c0e-46bd-9641-aced82038eca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1442.508360] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquired lock "refresh_cache-e81c03f7-9c0e-46bd-9641-aced82038eca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.508536] env[62619]: DEBUG nova.network.neutron [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1442.693520] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.888077] env[62619]: DEBUG nova.compute.utils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1442.888077] env[62619]: DEBUG nova.compute.manager [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1442.888077] env[62619]: DEBUG nova.network.neutron [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1442.951093] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777356, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.952665] env[62619]: DEBUG nova.policy [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0cde3ba9ee004055bb5e09bc932dc4f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0948c27a2b08413ba82d553452965c9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1443.031131] env[62619]: DEBUG nova.network.neutron [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1443.120928] env[62619]: DEBUG nova.network.neutron [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.361082] env[62619]: DEBUG nova.network.neutron [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Successfully created port: 6ffb9058-5f89-4766-b8d7-716e21a551f4 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1443.391946] env[62619]: DEBUG nova.compute.manager [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1443.451347] env[62619]: DEBUG oslo_vmware.api [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777356, 'name': PowerOnVM_Task, 'duration_secs': 0.765377} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.451645] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1443.451841] env[62619]: INFO nova.compute.manager [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Took 9.05 seconds to spawn the instance on the hypervisor. 
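Annotation: the spawn of instance 7c058337 traced above runs as a fixed sequence of vCenter tasks (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each waited on before the next starts, ending in "Took 9.05 seconds to spawn the instance on the hypervisor". Below is a minimal simulation of that pipeline; the step list matches the task names in the log, but run_task and spawn are invented helpers with simulated timings, not the vmwareapi driver code.

import time

SPAWN_STEPS = [
    "CopyVirtualDisk_Task",      # copy the cached image vmdk into the instance folder
    "ExtendVirtualDisk_Task",    # grow the root disk to the flavor's root size
    "ReconfigVM_Task",           # attach the copied vmdk to the VM
    "Rename_Task",               # rename the VM to its final name
    "PowerOnVM_Task",            # power the VM on
]


def run_task(name, duration=0.05):
    """Stand-in for submitting a vCenter task and blocking until it finishes."""
    time.sleep(duration)
    print(f"Task: {{'name': {name!r}, 'duration_secs': {duration}}} "
          "completed successfully.")


def spawn(instance_uuid):
    """Run the spawn steps strictly in order and report the total time."""
    start = time.monotonic()
    for step in SPAWN_STEPS:
        run_task(step)
    print(f"[instance: {instance_uuid}] Took {time.monotonic() - start:.2f} "
          "seconds to spawn the instance on the hypervisor.")


if __name__ == "__main__":
    spawn("7c058337-1684-4553-8e96-dd2cd1814a15")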
[ 1443.452024] env[62619]: DEBUG nova.compute.manager [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1443.452913] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11581b9b-3f29-4de7-bef6-171ff5cf64d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.625700] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Releasing lock "refresh_cache-e81c03f7-9c0e-46bd-9641-aced82038eca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.625700] env[62619]: DEBUG nova.compute.manager [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1443.625836] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1443.627058] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d7c3e5-3b60-462f-8735-7742d6214986 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.637070] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1443.637330] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c57819a6-07da-4add-830e-8d9b9db26fa7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.645520] env[62619]: DEBUG oslo_vmware.api [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1443.645520] env[62619]: value = "task-1777357" [ 1443.645520] env[62619]: _type = "Task" [ 1443.645520] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.659106] env[62619]: DEBUG oslo_vmware.api [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777357, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.854252] env[62619]: DEBUG nova.compute.manager [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1443.855274] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba2e891-b4b5-47e8-999d-f5ce00c24ef2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.957723] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a985c7-c297-484f-ad18-c8f2ba050146 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.966458] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f01be7-9be1-4d32-89b6-088d8b86fdcf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.975962] env[62619]: INFO nova.compute.manager [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Took 43.54 seconds to build instance. [ 1444.010194] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bbb1a824-14db-4013-a54e-047151535e69 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Lock "7c058337-1684-4553-8e96-dd2cd1814a15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.479s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1444.011129] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603287bb-c6ee-4316-b4e9-35765a8c99e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.021055] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a2c5ac-7a2a-425f-8dd1-2ac5de74c822 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.036186] env[62619]: DEBUG nova.compute.provider_tree [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1444.039380] env[62619]: DEBUG nova.compute.manager [req-011169c8-1617-41e0-92fc-981ce31d1d86 req-2e7bfa8f-eba3-4523-b5cb-ec530e49ae25 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Received event network-vif-plugged-467d9eda-baf0-4ee1-b652-587781aeb8e3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1444.039583] env[62619]: DEBUG oslo_concurrency.lockutils [req-011169c8-1617-41e0-92fc-981ce31d1d86 req-2e7bfa8f-eba3-4523-b5cb-ec530e49ae25 service nova] Acquiring lock 
"6be4f813-7171-4515-a728-5cf34665205a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.039781] env[62619]: DEBUG oslo_concurrency.lockutils [req-011169c8-1617-41e0-92fc-981ce31d1d86 req-2e7bfa8f-eba3-4523-b5cb-ec530e49ae25 service nova] Lock "6be4f813-7171-4515-a728-5cf34665205a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.039940] env[62619]: DEBUG oslo_concurrency.lockutils [req-011169c8-1617-41e0-92fc-981ce31d1d86 req-2e7bfa8f-eba3-4523-b5cb-ec530e49ae25 service nova] Lock "6be4f813-7171-4515-a728-5cf34665205a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1444.040124] env[62619]: DEBUG nova.compute.manager [req-011169c8-1617-41e0-92fc-981ce31d1d86 req-2e7bfa8f-eba3-4523-b5cb-ec530e49ae25 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] No waiting events found dispatching network-vif-plugged-467d9eda-baf0-4ee1-b652-587781aeb8e3 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1444.040317] env[62619]: WARNING nova.compute.manager [req-011169c8-1617-41e0-92fc-981ce31d1d86 req-2e7bfa8f-eba3-4523-b5cb-ec530e49ae25 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Received unexpected event network-vif-plugged-467d9eda-baf0-4ee1-b652-587781aeb8e3 for instance with vm_state building and task_state spawning. [ 1444.040474] env[62619]: DEBUG nova.compute.manager [req-011169c8-1617-41e0-92fc-981ce31d1d86 req-2e7bfa8f-eba3-4523-b5cb-ec530e49ae25 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Received event network-changed-467d9eda-baf0-4ee1-b652-587781aeb8e3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1444.040620] env[62619]: DEBUG nova.compute.manager [req-011169c8-1617-41e0-92fc-981ce31d1d86 req-2e7bfa8f-eba3-4523-b5cb-ec530e49ae25 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Refreshing instance network info cache due to event network-changed-467d9eda-baf0-4ee1-b652-587781aeb8e3. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1444.040795] env[62619]: DEBUG oslo_concurrency.lockutils [req-011169c8-1617-41e0-92fc-981ce31d1d86 req-2e7bfa8f-eba3-4523-b5cb-ec530e49ae25 service nova] Acquiring lock "refresh_cache-6be4f813-7171-4515-a728-5cf34665205a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1444.041055] env[62619]: DEBUG oslo_concurrency.lockutils [req-011169c8-1617-41e0-92fc-981ce31d1d86 req-2e7bfa8f-eba3-4523-b5cb-ec530e49ae25 service nova] Acquired lock "refresh_cache-6be4f813-7171-4515-a728-5cf34665205a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1444.041230] env[62619]: DEBUG nova.network.neutron [req-011169c8-1617-41e0-92fc-981ce31d1d86 req-2e7bfa8f-eba3-4523-b5cb-ec530e49ae25 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Refreshing network info cache for port 467d9eda-baf0-4ee1-b652-587781aeb8e3 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1444.158465] env[62619]: DEBUG oslo_vmware.api [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777357, 'name': PowerOffVM_Task, 'duration_secs': 0.205224} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.158747] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1444.158911] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1444.159203] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7526ce8-ba75-4043-86c3-98dae8af6e14 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.185421] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1444.185636] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1444.185810] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Deleting the datastore file [datastore1] e81c03f7-9c0e-46bd-9641-aced82038eca {{(pid=62619) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1444.186081] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b6ded1b-c540-4064-b12a-e50e470f1452 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.192903] env[62619]: DEBUG oslo_vmware.api [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for the task: (returnval){ [ 1444.192903] env[62619]: value = "task-1777359" [ 1444.192903] env[62619]: _type = "Task" [ 1444.192903] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.200860] env[62619]: DEBUG oslo_vmware.api [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777359, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.271560] env[62619]: DEBUG nova.network.neutron [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Successfully updated port: fc867071-21de-47e5-b245-cdd32ff75559 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1444.369238] env[62619]: INFO nova.compute.manager [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] instance snapshotting [ 1444.371910] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b6e67a-7c97-4be6-a078-b07073fc5ddb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.392619] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7073c487-9e2e-4549-adc1-1d1b319d798c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.404602] env[62619]: DEBUG nova.compute.manager [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1444.427398] env[62619]: DEBUG nova.virt.hardware [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1444.428514] env[62619]: DEBUG nova.virt.hardware [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1444.428514] env[62619]: DEBUG nova.virt.hardware [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1444.428514] env[62619]: DEBUG nova.virt.hardware [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1444.428514] env[62619]: DEBUG nova.virt.hardware [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1444.428514] env[62619]: DEBUG nova.virt.hardware [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1444.428514] env[62619]: DEBUG nova.virt.hardware [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1444.428724] env[62619]: DEBUG nova.virt.hardware [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1444.428724] env[62619]: DEBUG nova.virt.hardware [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Got 1 possible topologies 
{{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1444.428886] env[62619]: DEBUG nova.virt.hardware [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1444.429060] env[62619]: DEBUG nova.virt.hardware [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1444.429919] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4a2ceb-3689-4dba-9263-97293a76a1ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.439252] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c210d8-f875-4969-b146-0f112f5554d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.517532] env[62619]: DEBUG nova.compute.manager [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1444.543311] env[62619]: DEBUG nova.scheduler.client.report [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1444.606320] env[62619]: DEBUG nova.network.neutron [req-011169c8-1617-41e0-92fc-981ce31d1d86 req-2e7bfa8f-eba3-4523-b5cb-ec530e49ae25 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1444.702520] env[62619]: DEBUG oslo_vmware.api [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Task: {'id': task-1777359, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099506} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.702767] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1444.703225] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1444.703461] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1444.704114] env[62619]: INFO nova.compute.manager [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1444.704444] env[62619]: DEBUG oslo.service.loopingcall [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1444.705165] env[62619]: DEBUG nova.compute.manager [-] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1444.705289] env[62619]: DEBUG nova.network.neutron [-] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1444.748057] env[62619]: DEBUG nova.network.neutron [-] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1444.821658] env[62619]: DEBUG nova.network.neutron [req-011169c8-1617-41e0-92fc-981ce31d1d86 req-2e7bfa8f-eba3-4523-b5cb-ec530e49ae25 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1444.902764] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1444.903021] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d54fb991-66e2-487a-98e9-ce1c5b26101d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.910890] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1444.910890] env[62619]: value = "task-1777360" [ 1444.910890] env[62619]: _type = "Task" [ 1444.910890] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.920917] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777360, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.042452] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.050555] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.675s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.051091] env[62619]: DEBUG nova.compute.manager [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1445.058027] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.524s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.058027] env[62619]: INFO nova.compute.claims [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1445.250773] env[62619]: DEBUG nova.network.neutron [-] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1445.324662] env[62619]: DEBUG oslo_concurrency.lockutils [req-011169c8-1617-41e0-92fc-981ce31d1d86 req-2e7bfa8f-eba3-4523-b5cb-ec530e49ae25 service nova] Releasing lock "refresh_cache-6be4f813-7171-4515-a728-5cf34665205a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.422751] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777360, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.520130] env[62619]: DEBUG nova.network.neutron [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Successfully updated port: 6ffb9058-5f89-4766-b8d7-716e21a551f4 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1445.570027] env[62619]: DEBUG nova.compute.utils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1445.570027] env[62619]: DEBUG nova.compute.manager [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1445.570027] env[62619]: DEBUG nova.network.neutron [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1445.651616] env[62619]: DEBUG nova.policy [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '97c6890028244af29f5c7979524dc958', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9b2c21c3d424352a680c5f9660a8d7f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1445.753859] env[62619]: INFO nova.compute.manager [-] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Took 1.05 seconds to deallocate network for instance. [ 1445.922517] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777360, 'name': CreateSnapshot_Task, 'duration_secs': 0.804721} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.922854] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1445.923892] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b70f6b-775e-44ca-be96-4ebc391e140e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.985575] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "e9ca5148-f188-4a15-83ae-8f3d730b0dab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.986285] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "e9ca5148-f188-4a15-83ae-8f3d730b0dab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.986761] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock 
"e9ca5148-f188-4a15-83ae-8f3d730b0dab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.987043] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "e9ca5148-f188-4a15-83ae-8f3d730b0dab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.987262] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "e9ca5148-f188-4a15-83ae-8f3d730b0dab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.989678] env[62619]: INFO nova.compute.manager [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Terminating instance [ 1446.022944] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "refresh_cache-2a41be15-efaf-4e78-a278-2711cb11e98f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1446.023111] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired lock "refresh_cache-2a41be15-efaf-4e78-a278-2711cb11e98f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1446.023280] env[62619]: DEBUG nova.network.neutron [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1446.074458] env[62619]: DEBUG nova.compute.manager [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1446.088757] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Acquiring lock "39adf15c-f77e-4737-aeeb-258887007b9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.089074] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Lock "39adf15c-f77e-4737-aeeb-258887007b9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1446.213320] env[62619]: DEBUG nova.network.neutron [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Successfully created port: a1723b81-03da-4080-8d54-cacb839c3f1d {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1446.261029] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.442732] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1446.445520] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ace09922-f8e4-4c52-81ba-f5c1ec73af63 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.454549] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1446.454549] env[62619]: value = "task-1777361" [ 1446.454549] env[62619]: _type = "Task" [ 1446.454549] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.462834] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777361, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.494940] env[62619]: DEBUG nova.compute.manager [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1446.495179] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1446.496051] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1046586-5d55-482e-933f-b2e83ad41f1b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.504172] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1446.504428] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aee2a192-ef5c-40cb-ba2c-6e8aee06ca1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.511160] env[62619]: DEBUG oslo_vmware.api [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1446.511160] env[62619]: value = "task-1777362" [ 1446.511160] env[62619]: _type = "Task" [ 1446.511160] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.522750] env[62619]: DEBUG oslo_vmware.api [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777362, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.660371] env[62619]: DEBUG nova.network.neutron [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1446.670129] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e6836b-cbfe-4141-a0c0-c51923421648 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.683669] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46b6e41-5674-447e-8a0d-06811e37f7ad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.722488] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d76b90c-850d-45c0-a640-e2ba95a4ac0e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.731056] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4c3131-531c-4034-91df-41189bc97489 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.745347] env[62619]: DEBUG nova.compute.provider_tree [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1446.965191] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777361, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.020869] env[62619]: DEBUG oslo_vmware.api [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777362, 'name': PowerOffVM_Task, 'duration_secs': 0.205697} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.021257] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1447.021491] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1447.021698] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7cf81c42-5168-43c2-99d3-b36a10750393 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.086653] env[62619]: DEBUG nova.compute.manager [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Received event network-vif-plugged-fc867071-21de-47e5-b245-cdd32ff75559 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1447.086911] env[62619]: DEBUG oslo_concurrency.lockutils [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] Acquiring lock "6be4f813-7171-4515-a728-5cf34665205a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.087187] env[62619]: DEBUG oslo_concurrency.lockutils [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] Lock "6be4f813-7171-4515-a728-5cf34665205a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.087385] env[62619]: DEBUG oslo_concurrency.lockutils [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] Lock "6be4f813-7171-4515-a728-5cf34665205a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.087555] env[62619]: DEBUG nova.compute.manager [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] No waiting events found dispatching network-vif-plugged-fc867071-21de-47e5-b245-cdd32ff75559 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1447.087762] env[62619]: WARNING nova.compute.manager [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Received unexpected event network-vif-plugged-fc867071-21de-47e5-b245-cdd32ff75559 for instance with vm_state building and task_state spawning. 
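Annotation (not part of the log): the "Acquiring lock … by …", "acquired … waited Ns" and ""released" … held Ns" lines above come from oslo.concurrency's lockutils wrapper, which Nova uses to serialize work such as the per-instance "<uuid>-events" bookkeeping and the "refresh_cache-<uuid>" cache refreshes. A minimal sketch of that pattern, with made-up lock names and bodies (not Nova's actual code):

```python
from oslo_concurrency import lockutils

# Decorator form: all callers serialize on the named lock, and lockutils logs
# the "acquired ... waited Ns" / "released ... held Ns" DEBUG lines seen above.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # hypothetical body: update a resource tracker while holding the lock
    return {'instance': instance_uuid, 'claimed': True}

# Context-manager form, analogous to the per-instance "<uuid>-events" and
# "refresh_cache-<uuid>" locks in the log (the lock name here is an example).
def pop_instance_event(instance_uuid, event_name, waiters):
    with lockutils.lock('%s-events' % instance_uuid):
        # hypothetical: remove and return a registered waiter, if any
        return waiters.pop((instance_uuid, event_name), None)
```

When no waiter has been registered for an event, the lookup comes back empty and nova-compute logs the "No waiting events found dispatching …" / "Received unexpected event …" pair seen above.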
[ 1447.087984] env[62619]: DEBUG nova.compute.manager [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Received event network-changed-fc867071-21de-47e5-b245-cdd32ff75559 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1447.088172] env[62619]: DEBUG nova.compute.manager [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Refreshing instance network info cache due to event network-changed-fc867071-21de-47e5-b245-cdd32ff75559. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1447.088386] env[62619]: DEBUG oslo_concurrency.lockutils [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] Acquiring lock "refresh_cache-6be4f813-7171-4515-a728-5cf34665205a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.088524] env[62619]: DEBUG oslo_concurrency.lockutils [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] Acquired lock "refresh_cache-6be4f813-7171-4515-a728-5cf34665205a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.088728] env[62619]: DEBUG nova.network.neutron [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Refreshing network info cache for port fc867071-21de-47e5-b245-cdd32ff75559 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1447.099163] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1447.099386] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1447.099570] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Deleting the datastore file [datastore1] e9ca5148-f188-4a15-83ae-8f3d730b0dab {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1447.100810] env[62619]: DEBUG nova.compute.manager [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1447.102878] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07f184a5-08d3-4e2c-a719-5f204c9945a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.111403] env[62619]: DEBUG oslo_vmware.api [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1447.111403] env[62619]: value = "task-1777364" [ 1447.111403] env[62619]: _type = "Task" [ 1447.111403] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.123077] env[62619]: DEBUG oslo_vmware.api [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777364, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.131825] env[62619]: DEBUG nova.virt.hardware [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1447.132095] env[62619]: DEBUG nova.virt.hardware [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1447.132278] env[62619]: DEBUG nova.virt.hardware [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1447.132476] env[62619]: DEBUG nova.virt.hardware [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1447.132806] env[62619]: DEBUG nova.virt.hardware [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1447.132881] env[62619]: DEBUG nova.virt.hardware [None req-e241c148-2bc9-4997-bb54-28fb406e0088 
tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1447.133057] env[62619]: DEBUG nova.virt.hardware [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1447.133217] env[62619]: DEBUG nova.virt.hardware [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1447.133377] env[62619]: DEBUG nova.virt.hardware [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1447.133531] env[62619]: DEBUG nova.virt.hardware [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1447.133698] env[62619]: DEBUG nova.virt.hardware [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1447.134673] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea3ac15-81dc-442a-8b05-87c7dd85a915 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.143352] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80747c25-06a0-4405-a760-ec6dcd3e1725 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.184617] env[62619]: DEBUG nova.network.neutron [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Updating instance_info_cache with network_info: [{"id": "6ffb9058-5f89-4766-b8d7-716e21a551f4", "address": "fa:16:3e:03:48:b8", "network": {"id": "7bc2fa7e-84fd-4916-9af1-aa767e1e38b8", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1746046422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0948c27a2b08413ba82d553452965c9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ffb9058-5f", "ovs_interfaceid": "6ffb9058-5f89-4766-b8d7-716e21a551f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1447.252026] env[62619]: DEBUG nova.scheduler.client.report [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1447.467533] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777361, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.505645] env[62619]: DEBUG nova.network.neutron [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Successfully updated port: bfe09361-c134-4a13-a07f-d903524c0546 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1447.621942] env[62619]: DEBUG oslo_vmware.api [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777364, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162152} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.622243] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1447.622418] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1447.622586] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1447.622750] env[62619]: INFO nova.compute.manager [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1447.622984] env[62619]: DEBUG oslo.service.loopingcall [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1447.623208] env[62619]: DEBUG nova.compute.manager [-] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1447.623364] env[62619]: DEBUG nova.network.neutron [-] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1447.681457] env[62619]: DEBUG nova.network.neutron [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1447.686982] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lock "refresh_cache-2a41be15-efaf-4e78-a278-2711cb11e98f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1447.687357] env[62619]: DEBUG nova.compute.manager [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Instance network_info: |[{"id": "6ffb9058-5f89-4766-b8d7-716e21a551f4", "address": "fa:16:3e:03:48:b8", "network": {"id": "7bc2fa7e-84fd-4916-9af1-aa767e1e38b8", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1746046422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0948c27a2b08413ba82d553452965c9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ffb9058-5f", "ovs_interfaceid": "6ffb9058-5f89-4766-b8d7-716e21a551f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1447.687841] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:48:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a27fd90b-16a5-43af-bede-ae36762ece00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ffb9058-5f89-4766-b8d7-716e21a551f4', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1447.698825] env[62619]: DEBUG oslo.service.loopingcall [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1447.700064] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1447.700466] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-060c0f50-8b94-4ceb-8e49-259eced6e66d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.731664] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1447.731664] env[62619]: value = "task-1777365" [ 1447.731664] env[62619]: _type = "Task" [ 1447.731664] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.741388] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777365, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.754712] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.700s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.755259] env[62619]: DEBUG nova.compute.manager [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1447.758130] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 22.693s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.758344] env[62619]: DEBUG nova.objects.instance [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1447.969310] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777361, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.971291] env[62619]: DEBUG nova.network.neutron [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.009264] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "refresh_cache-6be4f813-7171-4515-a728-5cf34665205a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.056924] env[62619]: DEBUG oslo_concurrency.lockutils [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "312aed5b-a66e-4428-ac1b-483dc2b38291" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.057283] env[62619]: DEBUG oslo_concurrency.lockutils [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "312aed5b-a66e-4428-ac1b-483dc2b38291" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.057549] env[62619]: DEBUG oslo_concurrency.lockutils [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "312aed5b-a66e-4428-ac1b-483dc2b38291-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.057748] env[62619]: DEBUG oslo_concurrency.lockutils [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "312aed5b-a66e-4428-ac1b-483dc2b38291-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.057919] env[62619]: DEBUG oslo_concurrency.lockutils [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "312aed5b-a66e-4428-ac1b-483dc2b38291-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.060201] env[62619]: INFO nova.compute.manager [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Terminating instance [ 1448.244941] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777365, 'name': CreateVM_Task} 
progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.265331] env[62619]: DEBUG nova.compute.utils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1448.268888] env[62619]: DEBUG nova.compute.manager [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1448.269348] env[62619]: DEBUG nova.network.neutron [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1448.424141] env[62619]: DEBUG nova.policy [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b7492621ad8f42fab35b55b3615d38e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82e5715bea444a7d8da5c897258df611', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1448.469384] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777361, 'name': CloneVM_Task, 'duration_secs': 1.824679} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.471266] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Created linked-clone VM from snapshot [ 1448.471266] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36379f5-d032-4c99-b32d-f56529cad2b0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.475504] env[62619]: DEBUG oslo_concurrency.lockutils [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] Releasing lock "refresh_cache-6be4f813-7171-4515-a728-5cf34665205a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.476204] env[62619]: DEBUG nova.compute.manager [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Received event network-vif-plugged-6ffb9058-5f89-4766-b8d7-716e21a551f4 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1448.476521] env[62619]: DEBUG oslo_concurrency.lockutils [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] Acquiring lock "2a41be15-efaf-4e78-a278-2711cb11e98f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.479031] env[62619]: DEBUG oslo_concurrency.lockutils [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] Lock "2a41be15-efaf-4e78-a278-2711cb11e98f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.479031] env[62619]: DEBUG oslo_concurrency.lockutils [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] Lock "2a41be15-efaf-4e78-a278-2711cb11e98f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.479031] env[62619]: DEBUG nova.compute.manager [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] No waiting events found dispatching network-vif-plugged-6ffb9058-5f89-4766-b8d7-716e21a551f4 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1448.479031] env[62619]: WARNING nova.compute.manager [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Received unexpected event network-vif-plugged-6ffb9058-5f89-4766-b8d7-716e21a551f4 for instance with vm_state building and task_state spawning. 
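Annotation (not part of the log): the CreateSnapshot_Task, CloneVM_Task, PowerOffVM_Task and DeleteDatastoreFile_Task entries in this section all follow the same oslo.vmware pattern: a SOAP method is invoked through the API session, and the returned task is polled ("progress is N%") until it completes. A rough, hypothetical sketch of that pattern; the connection values and snapshot parameters below are invented placeholders, not taken from this deployment:

```python
from oslo_vmware import api as vmware_api

def snapshot_and_wait(host, user, password, vm_ref):
    """Illustrative only: run a vCenter task and poll it to completion."""
    # Open an API session to vCenter; retry/poll values are arbitrary examples.
    session = vmware_api.VMwareAPISession(
        host, user, password, api_retry_count=10, task_poll_interval=0.5)
    # invoke_api() issues the SOAP call (cf. "Invoking
    # VirtualMachine.CreateSnapshot_Task" above) and returns a task reference.
    task = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                              name='example-snapshot', description='',
                              memory=False, quiesce=False)
    # wait_for_task() polls the task (the "Task: {...} progress is N%" lines)
    # and returns its result on success, raising if the task reports an error.
    return session.wait_for_task(task)
```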
[ 1448.479031] env[62619]: DEBUG nova.compute.manager [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Received event network-changed-6ffb9058-5f89-4766-b8d7-716e21a551f4 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1448.479256] env[62619]: DEBUG nova.compute.manager [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Refreshing instance network info cache due to event network-changed-6ffb9058-5f89-4766-b8d7-716e21a551f4. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1448.479256] env[62619]: DEBUG oslo_concurrency.lockutils [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] Acquiring lock "refresh_cache-2a41be15-efaf-4e78-a278-2711cb11e98f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.479256] env[62619]: DEBUG oslo_concurrency.lockutils [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] Acquired lock "refresh_cache-2a41be15-efaf-4e78-a278-2711cb11e98f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.479256] env[62619]: DEBUG nova.network.neutron [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Refreshing network info cache for port 6ffb9058-5f89-4766-b8d7-716e21a551f4 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1448.483950] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquired lock "refresh_cache-6be4f813-7171-4515-a728-5cf34665205a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.484609] env[62619]: DEBUG nova.network.neutron [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1448.485897] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Uploading image 59235677-cc77-476d-b578-0801f2854047 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1448.524118] env[62619]: DEBUG oslo_vmware.rw_handles [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1448.524118] env[62619]: value = "vm-368955" [ 1448.524118] env[62619]: _type = "VirtualMachine" [ 1448.524118] env[62619]: }. 
{{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1448.524312] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-57373e5a-c4c6-486c-ab88-606591304473 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.531442] env[62619]: DEBUG oslo_vmware.rw_handles [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lease: (returnval){ [ 1448.531442] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5220af0a-e3f2-f4f8-2e86-98cdf650ffc8" [ 1448.531442] env[62619]: _type = "HttpNfcLease" [ 1448.531442] env[62619]: } obtained for exporting VM: (result){ [ 1448.531442] env[62619]: value = "vm-368955" [ 1448.531442] env[62619]: _type = "VirtualMachine" [ 1448.531442] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1448.531717] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the lease: (returnval){ [ 1448.531717] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5220af0a-e3f2-f4f8-2e86-98cdf650ffc8" [ 1448.531717] env[62619]: _type = "HttpNfcLease" [ 1448.531717] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1448.541472] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1448.541472] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5220af0a-e3f2-f4f8-2e86-98cdf650ffc8" [ 1448.541472] env[62619]: _type = "HttpNfcLease" [ 1448.541472] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1448.564575] env[62619]: DEBUG nova.compute.manager [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1448.564824] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1448.565712] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f36d555-6b2e-44ed-bc69-62b388425db6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.575306] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1448.575587] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2396c22-961e-41ff-83e3-82a45927b40f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.583231] env[62619]: DEBUG oslo_vmware.api [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1448.583231] env[62619]: value = "task-1777367" [ 1448.583231] env[62619]: _type = "Task" [ 1448.583231] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.595832] env[62619]: DEBUG oslo_vmware.api [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777367, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.748512] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777365, 'name': CreateVM_Task, 'duration_secs': 0.832488} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.748737] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1448.750351] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.750602] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.751168] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1448.752086] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67d7b036-7296-4176-8eb7-9c44b3cb8b15 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.758395] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1448.758395] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5272e231-9a82-7684-1353-80f32fba0382" [ 1448.758395] env[62619]: _type = "Task" [ 1448.758395] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.776614] env[62619]: DEBUG nova.compute.manager [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1448.782765] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3dd91c23-6bca-4fa0-a85e-ba8a2652194a tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.025s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.787719] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5272e231-9a82-7684-1353-80f32fba0382, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.787719] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.381s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.787719] env[62619]: INFO nova.compute.claims [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1448.792822] env[62619]: DEBUG nova.network.neutron [-] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.039917] env[62619]: DEBUG nova.network.neutron [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1449.047960] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1449.047960] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5220af0a-e3f2-f4f8-2e86-98cdf650ffc8" [ 1449.047960] env[62619]: _type = "HttpNfcLease" [ 1449.047960] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1449.048775] env[62619]: DEBUG oslo_vmware.rw_handles [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1449.048775] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5220af0a-e3f2-f4f8-2e86-98cdf650ffc8" [ 1449.048775] env[62619]: _type = "HttpNfcLease" [ 1449.048775] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1449.050640] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b7efa0-bb92-494d-ad7c-c19c8fe26ad9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.062330] env[62619]: DEBUG oslo_vmware.rw_handles [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525843a5-0ffb-dcc6-05d8-a65823d4ce27/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1449.062789] env[62619]: DEBUG oslo_vmware.rw_handles [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525843a5-0ffb-dcc6-05d8-a65823d4ce27/disk-0.vmdk for reading. 
{{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1449.183052] env[62619]: DEBUG oslo_vmware.api [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777367, 'name': PowerOffVM_Task, 'duration_secs': 0.360956} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.183491] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1449.187019] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1449.187019] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd67466e-e46e-4d1c-9cd6-f092e2fbb50d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.269570] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5272e231-9a82-7684-1353-80f32fba0382, 'name': SearchDatastore_Task, 'duration_secs': 0.012912} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.271189] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.271189] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1449.271189] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1449.271189] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.271189] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1449.271730] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c15f716a-f686-4ccd-94f4-55b962bfa51b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.296587] env[62619]: INFO nova.compute.manager [-] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Took 1.67 seconds to deallocate network for instance. [ 1449.296884] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1449.297057] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1449.299300] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75c9a6a8-289e-4b95-bf1e-77564ecae376 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.312467] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1449.312467] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527a4700-834c-9677-59fa-5e919decff60" [ 1449.312467] env[62619]: _type = "Task" [ 1449.312467] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.324778] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5e7343a2-6220-4900-8012-9f5870e34ef5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.334492] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527a4700-834c-9677-59fa-5e919decff60, 'name': SearchDatastore_Task, 'duration_secs': 0.010823} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.339562] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1449.339861] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1449.340109] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Deleting the datastore file [datastore1] 312aed5b-a66e-4428-ac1b-483dc2b38291 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1449.340414] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ae4b5d6-436c-403b-9ef2-b7811a1a050c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.343577] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b05e194-7278-4374-9673-5d16f27f149d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.351610] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1449.351610] env[62619]: value = 
"session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f60c88-917f-4602-8091-cf7bd78ea376" [ 1449.351610] env[62619]: _type = "Task" [ 1449.351610] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.356528] env[62619]: DEBUG oslo_vmware.api [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for the task: (returnval){ [ 1449.356528] env[62619]: value = "task-1777369" [ 1449.356528] env[62619]: _type = "Task" [ 1449.356528] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.369093] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f60c88-917f-4602-8091-cf7bd78ea376, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.381348] env[62619]: DEBUG oslo_vmware.api [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777369, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.540962] env[62619]: DEBUG nova.network.neutron [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Successfully updated port: a1723b81-03da-4080-8d54-cacb839c3f1d {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1449.577084] env[62619]: DEBUG nova.network.neutron [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Successfully created port: a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1449.726873] env[62619]: DEBUG nova.network.neutron [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Updated VIF entry in instance network info cache for port 6ffb9058-5f89-4766-b8d7-716e21a551f4. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1449.727352] env[62619]: DEBUG nova.network.neutron [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Updating instance_info_cache with network_info: [{"id": "6ffb9058-5f89-4766-b8d7-716e21a551f4", "address": "fa:16:3e:03:48:b8", "network": {"id": "7bc2fa7e-84fd-4916-9af1-aa767e1e38b8", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1746046422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0948c27a2b08413ba82d553452965c9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ffb9058-5f", "ovs_interfaceid": "6ffb9058-5f89-4766-b8d7-716e21a551f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.792084] env[62619]: DEBUG nova.compute.manager [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1449.806911] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.823637] env[62619]: DEBUG nova.virt.hardware [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1449.825104] env[62619]: DEBUG nova.virt.hardware [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1449.825256] env[62619]: DEBUG nova.virt.hardware [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1449.826579] env[62619]: DEBUG nova.virt.hardware [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1449.826916] env[62619]: DEBUG nova.virt.hardware [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1449.827764] env[62619]: DEBUG nova.virt.hardware [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1449.828109] env[62619]: DEBUG nova.virt.hardware [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1449.828274] env[62619]: DEBUG nova.virt.hardware [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1449.829434] env[62619]: DEBUG nova.virt.hardware [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1449.829604] env[62619]: DEBUG nova.virt.hardware [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1449.831184] env[62619]: DEBUG nova.virt.hardware [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1449.832092] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1442e301-45ba-47f6-ac96-46f67fc66e95 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.846802] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3664f7aa-84b2-485f-96fe-0afa7bbcaef9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.878431] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f60c88-917f-4602-8091-cf7bd78ea376, 'name': SearchDatastore_Task, 'duration_secs': 0.017913} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.881942] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.882446] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 2a41be15-efaf-4e78-a278-2711cb11e98f/2a41be15-efaf-4e78-a278-2711cb11e98f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1449.883315] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-85de4142-d61f-4d5e-a04f-bec282f141e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.891218] env[62619]: DEBUG oslo_vmware.api [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Task: {'id': task-1777369, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.266628} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.892373] env[62619]: DEBUG nova.network.neutron [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Updating instance_info_cache with network_info: [{"id": "467d9eda-baf0-4ee1-b652-587781aeb8e3", "address": "fa:16:3e:c6:8a:94", "network": {"id": "869ae219-0e41-4a92-a73d-09ecc0a7efe6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1589566262", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0e5087-d65b-416f-90fe-beaa9c534ad3", "external-id": "nsx-vlan-transportzone-522", "segmentation_id": 522, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap467d9eda-ba", "ovs_interfaceid": "467d9eda-baf0-4ee1-b652-587781aeb8e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fc867071-21de-47e5-b245-cdd32ff75559", "address": "fa:16:3e:32:6b:c3", "network": {"id": "4b2ae7d4-762d-4f1c-8ed8-ce0de4df0c2f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1674421522", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.129.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47499d09-8010-4d02-ac96-4f057c104692", "external-id": "nsx-vlan-transportzone-14", "segmentation_id": 14, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc867071-21", "ovs_interfaceid": "fc867071-21de-47e5-b245-cdd32ff75559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bfe09361-c134-4a13-a07f-d903524c0546", "address": "fa:16:3e:ec:03:da", "network": {"id": "869ae219-0e41-4a92-a73d-09ecc0a7efe6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1589566262", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0e5087-d65b-416f-90fe-beaa9c534ad3", "external-id": "nsx-vlan-transportzone-522", "segmentation_id": 522, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfe09361-c1", "ovs_interfaceid": "bfe09361-c134-4a13-a07f-d903524c0546", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.894125] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1449.894385] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1449.894578] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1449.894762] env[62619]: INFO nova.compute.manager [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Took 1.33 seconds to destroy the instance on the hypervisor. 
[ 1449.895048] env[62619]: DEBUG oslo.service.loopingcall [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1449.897614] env[62619]: DEBUG nova.compute.manager [-] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1449.897726] env[62619]: DEBUG nova.network.neutron [-] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1449.901552] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1449.901552] env[62619]: value = "task-1777370" [ 1449.901552] env[62619]: _type = "Task" [ 1449.901552] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.911164] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777370, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.956892] env[62619]: DEBUG nova.compute.manager [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Received event network-vif-plugged-bfe09361-c134-4a13-a07f-d903524c0546 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1449.956892] env[62619]: DEBUG oslo_concurrency.lockutils [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] Acquiring lock "6be4f813-7171-4515-a728-5cf34665205a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.956892] env[62619]: DEBUG oslo_concurrency.lockutils [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] Lock "6be4f813-7171-4515-a728-5cf34665205a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.956892] env[62619]: DEBUG oslo_concurrency.lockutils [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] Lock "6be4f813-7171-4515-a728-5cf34665205a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.956892] env[62619]: DEBUG nova.compute.manager [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] No waiting events found dispatching network-vif-plugged-bfe09361-c134-4a13-a07f-d903524c0546 
{{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1449.957077] env[62619]: WARNING nova.compute.manager [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Received unexpected event network-vif-plugged-bfe09361-c134-4a13-a07f-d903524c0546 for instance with vm_state building and task_state spawning. [ 1449.957422] env[62619]: DEBUG nova.compute.manager [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Received event network-changed-bfe09361-c134-4a13-a07f-d903524c0546 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1449.957493] env[62619]: DEBUG nova.compute.manager [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Refreshing instance network info cache due to event network-changed-bfe09361-c134-4a13-a07f-d903524c0546. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1449.957639] env[62619]: DEBUG oslo_concurrency.lockutils [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] Acquiring lock "refresh_cache-6be4f813-7171-4515-a728-5cf34665205a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.045420] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Acquiring lock "refresh_cache-d4230edc-cfda-4b9f-ab42-2f39c699ff03" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.047042] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Acquired lock "refresh_cache-d4230edc-cfda-4b9f-ab42-2f39c699ff03" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.047042] env[62619]: DEBUG nova.network.neutron [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1450.234578] env[62619]: DEBUG oslo_concurrency.lockutils [req-95e26125-fbc5-48b7-9145-ab3613b3d01f req-e880083a-1bfa-469f-b1f7-ca8f0111c5b7 service nova] Releasing lock "refresh_cache-2a41be15-efaf-4e78-a278-2711cb11e98f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.401273] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Releasing lock "refresh_cache-6be4f813-7171-4515-a728-5cf34665205a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.401928] env[62619]: DEBUG nova.compute.manager [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Instance 
network_info: |[{"id": "467d9eda-baf0-4ee1-b652-587781aeb8e3", "address": "fa:16:3e:c6:8a:94", "network": {"id": "869ae219-0e41-4a92-a73d-09ecc0a7efe6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1589566262", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0e5087-d65b-416f-90fe-beaa9c534ad3", "external-id": "nsx-vlan-transportzone-522", "segmentation_id": 522, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap467d9eda-ba", "ovs_interfaceid": "467d9eda-baf0-4ee1-b652-587781aeb8e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fc867071-21de-47e5-b245-cdd32ff75559", "address": "fa:16:3e:32:6b:c3", "network": {"id": "4b2ae7d4-762d-4f1c-8ed8-ce0de4df0c2f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1674421522", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47499d09-8010-4d02-ac96-4f057c104692", "external-id": "nsx-vlan-transportzone-14", "segmentation_id": 14, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc867071-21", "ovs_interfaceid": "fc867071-21de-47e5-b245-cdd32ff75559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bfe09361-c134-4a13-a07f-d903524c0546", "address": "fa:16:3e:ec:03:da", "network": {"id": "869ae219-0e41-4a92-a73d-09ecc0a7efe6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1589566262", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0e5087-d65b-416f-90fe-beaa9c534ad3", "external-id": "nsx-vlan-transportzone-522", "segmentation_id": 522, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfe09361-c1", "ovs_interfaceid": "bfe09361-c134-4a13-a07f-d903524c0546", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1450.402401] env[62619]: DEBUG oslo_concurrency.lockutils [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] Acquired lock "refresh_cache-6be4f813-7171-4515-a728-5cf34665205a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.402611] env[62619]: DEBUG nova.network.neutron [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Refreshing network info cache for port bfe09361-c134-4a13-a07f-d903524c0546 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1450.404571] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:8a:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'da0e5087-d65b-416f-90fe-beaa9c534ad3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '467d9eda-baf0-4ee1-b652-587781aeb8e3', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:6b:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '47499d09-8010-4d02-ac96-4f057c104692', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc867071-21de-47e5-b245-cdd32ff75559', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:03:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'da0e5087-d65b-416f-90fe-beaa9c534ad3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bfe09361-c134-4a13-a07f-d903524c0546', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1450.419389] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Creating folder: Project (8311e2dca4814727b91967833796fc66). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1450.429421] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-787c16a0-5d29-410e-a4cc-f7e885a90247 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.438105] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777370, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.442664] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Created folder: Project (8311e2dca4814727b91967833796fc66) in parent group-v368875. [ 1450.442932] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Creating folder: Instances. Parent ref: group-v368957. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1450.443426] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-534f5725-1a98-440f-8e59-6f3e515c7e1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.456575] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Created folder: Instances in parent group-v368957. [ 1450.456922] env[62619]: DEBUG oslo.service.loopingcall [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1450.457046] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1450.458308] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7933dc2a-8b2a-4723-991d-9c1efb6301a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.488189] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1450.488189] env[62619]: value = "task-1777373" [ 1450.488189] env[62619]: _type = "Task" [ 1450.488189] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.494664] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777373, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.523597] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449d5c9f-ce8e-4c7d-92a2-cf5d7a2bb4ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.534478] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb555f7f-d69c-4487-88af-eb1c94cdf710 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.570236] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b84c2f8-fd5d-4be6-a0ba-df9aac377479 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.578981] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48acec19-1468-4506-b65b-bb035aa7b95a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.594304] env[62619]: DEBUG nova.compute.provider_tree [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1450.635969] env[62619]: DEBUG nova.network.neutron [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1450.720682] env[62619]: DEBUG nova.network.neutron [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Updated VIF entry in instance network info cache for port bfe09361-c134-4a13-a07f-d903524c0546. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1450.721348] env[62619]: DEBUG nova.network.neutron [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Updating instance_info_cache with network_info: [{"id": "467d9eda-baf0-4ee1-b652-587781aeb8e3", "address": "fa:16:3e:c6:8a:94", "network": {"id": "869ae219-0e41-4a92-a73d-09ecc0a7efe6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1589566262", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0e5087-d65b-416f-90fe-beaa9c534ad3", "external-id": "nsx-vlan-transportzone-522", "segmentation_id": 522, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap467d9eda-ba", "ovs_interfaceid": "467d9eda-baf0-4ee1-b652-587781aeb8e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fc867071-21de-47e5-b245-cdd32ff75559", "address": "fa:16:3e:32:6b:c3", "network": {"id": "4b2ae7d4-762d-4f1c-8ed8-ce0de4df0c2f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1674421522", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47499d09-8010-4d02-ac96-4f057c104692", "external-id": "nsx-vlan-transportzone-14", "segmentation_id": 14, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc867071-21", "ovs_interfaceid": "fc867071-21de-47e5-b245-cdd32ff75559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bfe09361-c134-4a13-a07f-d903524c0546", "address": "fa:16:3e:ec:03:da", "network": {"id": "869ae219-0e41-4a92-a73d-09ecc0a7efe6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1589566262", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0e5087-d65b-416f-90fe-beaa9c534ad3", "external-id": "nsx-vlan-transportzone-522", 
"segmentation_id": 522, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfe09361-c1", "ovs_interfaceid": "bfe09361-c134-4a13-a07f-d903524c0546", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.727532] env[62619]: DEBUG nova.network.neutron [-] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.792553] env[62619]: DEBUG nova.compute.manager [req-6efa575d-cb11-402c-ba94-30bd49146e90 req-f52ed34f-5052-499d-b3cf-7b038610569f service nova] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Received event network-vif-deleted-789bc80f-8a7f-49a0-8500-7c2ea007446c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1450.914478] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777370, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553531} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.914910] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 2a41be15-efaf-4e78-a278-2711cb11e98f/2a41be15-efaf-4e78-a278-2711cb11e98f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1450.915334] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1450.915459] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7dc993a4-20c1-4cab-9800-dc1c3d4625c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.922559] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1450.922559] env[62619]: value = "task-1777374" [ 1450.922559] env[62619]: _type = "Task" [ 1450.922559] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.926818] env[62619]: DEBUG nova.network.neutron [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Updating instance_info_cache with network_info: [{"id": "a1723b81-03da-4080-8d54-cacb839c3f1d", "address": "fa:16:3e:e1:e8:1c", "network": {"id": "286bf2fd-17d0-45ea-a774-8e5dd54931d9", "bridge": "br-int", "label": "tempest-ServersTestJSON-33521734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9b2c21c3d424352a680c5f9660a8d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1723b81-03", "ovs_interfaceid": "a1723b81-03da-4080-8d54-cacb839c3f1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.933488] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777374, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.997124] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777373, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.102921] env[62619]: DEBUG nova.scheduler.client.report [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1451.224131] env[62619]: DEBUG oslo_concurrency.lockutils [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] Releasing lock "refresh_cache-6be4f813-7171-4515-a728-5cf34665205a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.224430] env[62619]: DEBUG nova.compute.manager [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Received event network-changed-9463c863-5c42-4fc9-a0c8-a6c9de3bddcd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1451.224628] env[62619]: DEBUG nova.compute.manager [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Refreshing instance network info cache due to event network-changed-9463c863-5c42-4fc9-a0c8-a6c9de3bddcd. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1451.224839] env[62619]: DEBUG oslo_concurrency.lockutils [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] Acquiring lock "refresh_cache-7c058337-1684-4553-8e96-dd2cd1814a15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.224975] env[62619]: DEBUG oslo_concurrency.lockutils [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] Acquired lock "refresh_cache-7c058337-1684-4553-8e96-dd2cd1814a15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.225150] env[62619]: DEBUG nova.network.neutron [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Refreshing network info cache for port 9463c863-5c42-4fc9-a0c8-a6c9de3bddcd {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1451.230148] env[62619]: INFO nova.compute.manager [-] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Took 1.33 seconds to deallocate network for instance. 
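[editor's note] The cache updates above log the full network_info blob as a JSON list of VIF dicts ("id", "address", "network" -> "subnets" -> "ips", "details" -> "segmentation_id"). A minimal sketch of reading such a blob back out of the log, assuming only the field names visible in these entries (everything else, including the helper name, is illustrative):

    import json

    def summarize_network_info(network_info_json):
        """Return (port_id, mac, [fixed_ips], segmentation_id) per VIF."""
        rows = []
        for vif in json.loads(network_info_json):
            # Collect the fixed addresses from every subnet of the VIF's network.
            ips = [ip["address"]
                   for subnet in vif["network"]["subnets"]
                   for ip in subnet["ips"]
                   if ip.get("type") == "fixed"]
            rows.append((vif["id"],
                         vif["address"],
                         ips,
                         vif.get("details", {}).get("segmentation_id")))
        return rows

    if __name__ == "__main__":
        # Tiny inline sample shaped like the first port logged above.
        sample = json.dumps([{
            "id": "467d9eda-baf0-4ee1-b652-587781aeb8e3",
            "address": "fa:16:3e:c6:8a:94",
            "network": {"subnets": [{"ips": [{"address": "192.168.128.70",
                                              "type": "fixed"}]}]},
            "details": {"segmentation_id": 522},
        }])
        for port_id, mac, ips, seg in summarize_network_info(sample):
            print(port_id, mac, ips, seg)

[end editor's note]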
[ 1451.432788] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Releasing lock "refresh_cache-d4230edc-cfda-4b9f-ab42-2f39c699ff03" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.433112] env[62619]: DEBUG nova.compute.manager [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Instance network_info: |[{"id": "a1723b81-03da-4080-8d54-cacb839c3f1d", "address": "fa:16:3e:e1:e8:1c", "network": {"id": "286bf2fd-17d0-45ea-a774-8e5dd54931d9", "bridge": "br-int", "label": "tempest-ServersTestJSON-33521734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9b2c21c3d424352a680c5f9660a8d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1723b81-03", "ovs_interfaceid": "a1723b81-03da-4080-8d54-cacb839c3f1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1451.433433] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777374, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106335} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.433974] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:e8:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '41278529-8bd2-44a1-97c8-03967faa3ff7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a1723b81-03da-4080-8d54-cacb839c3f1d', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1451.442256] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Creating folder: Project (c9b2c21c3d424352a680c5f9660a8d7f). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1451.442525] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1451.442799] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfa7ab45-2d8d-4911-b67e-238190e8a6b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.445078] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da65734f-620e-4276-9553-508c140fe9c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.470326] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 2a41be15-efaf-4e78-a278-2711cb11e98f/2a41be15-efaf-4e78-a278-2711cb11e98f.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1451.471762] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac77dba4-21e6-4233-a37d-67a7ec7265df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.488581] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Created folder: Project (c9b2c21c3d424352a680c5f9660a8d7f) in parent group-v368875. [ 1451.488879] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Creating folder: Instances. Parent ref: group-v368960. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1451.488994] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1fd58eed-a700-41e0-9f59-2b6a072a7047 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.500728] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1451.500728] env[62619]: value = "task-1777376" [ 1451.500728] env[62619]: _type = "Task" [ 1451.500728] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.506999] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777373, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.508750] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Created folder: Instances in parent group-v368960. 
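[editor's note] The "Waiting for the task" / "progress is N%" / "completed successfully" entries around the folder creation and ReconfigVM_Task above follow a simple poll-until-done pattern. The sketch below mirrors that pattern only; it is not oslo.vmware's wait_for_task, and FakeTask, get_task_info and poll_interval are assumptions made for a self-contained example:

    import time

    class FakeTask:
        """Stand-in for a vCenter task handle (e.g. task-1777376)."""
        def __init__(self, name, ticks_to_finish=3):
            self.name = name
            self._ticks = ticks_to_finish

        def get_task_info(self):
            # Pretend the task advances each time it is polled.
            self._ticks -= 1
            if self._ticks <= 0:
                return {"state": "success", "progress": 100}
            return {"state": "running", "progress": 100 - 33 * self._ticks}

    def wait_for_task(task, poll_interval=0.5):
        start = time.monotonic()
        while True:
            info = task.get_task_info()
            if info["state"] == "success":
                duration = time.monotonic() - start
                print("Task %s completed successfully in %.3fs" % (task.name, duration))
                return info
            if info["state"] == "error":
                raise RuntimeError("Task %s failed" % task.name)
            print("Task %s progress is %d%%" % (task.name, info["progress"]))
            time.sleep(poll_interval)

    if __name__ == "__main__":
        wait_for_task(FakeTask("ReconfigVM_Task"))

[end editor's note]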
[ 1451.508991] env[62619]: DEBUG oslo.service.loopingcall [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1451.509204] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1451.509572] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d7939ab-4029-4e61-97e5-08f71d18b983 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.526905] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777376, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.531173] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1451.531173] env[62619]: value = "task-1777378" [ 1451.531173] env[62619]: _type = "Task" [ 1451.531173] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.538509] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777378, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.609730] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.824s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.610297] env[62619]: DEBUG nova.compute.manager [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1451.612941] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.995s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.613174] env[62619]: DEBUG nova.objects.instance [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Lazy-loading 'resources' on Instance uuid 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1451.698984] env[62619]: DEBUG nova.network.neutron [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Successfully updated port: a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1451.737108] env[62619]: DEBUG oslo_concurrency.lockutils [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.998741] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777373, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.011630] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777376, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.041015] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777378, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.116752] env[62619]: DEBUG nova.compute.utils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1452.122783] env[62619]: DEBUG nova.compute.manager [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1452.123025] env[62619]: DEBUG nova.network.neutron [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1452.201129] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquiring lock "refresh_cache-ef41dd29-1270-4071-9e89-20132131de2d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.201316] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquired lock "refresh_cache-ef41dd29-1270-4071-9e89-20132131de2d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.201476] env[62619]: DEBUG nova.network.neutron [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1452.261096] env[62619]: DEBUG nova.policy [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a642f85f0187484480d998009032fb1d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a12b52b8da24c44806817fff5661ff0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1452.275373] env[62619]: DEBUG nova.network.neutron [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Updated VIF entry in instance network info cache for port 9463c863-5c42-4fc9-a0c8-a6c9de3bddcd. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1452.275778] env[62619]: DEBUG nova.network.neutron [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Updating instance_info_cache with network_info: [{"id": "9463c863-5c42-4fc9-a0c8-a6c9de3bddcd", "address": "fa:16:3e:31:14:da", "network": {"id": "f2a947a0-6efb-4fd5-9aa4-f9604a752455", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2058354215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90501fd522094b02a04da8bc54edbcde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9463c863-5c", "ovs_interfaceid": "9463c863-5c42-4fc9-a0c8-a6c9de3bddcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1452.399505] env[62619]: DEBUG nova.compute.manager [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Received event network-changed-a1723b81-03da-4080-8d54-cacb839c3f1d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1452.399709] env[62619]: DEBUG nova.compute.manager [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Refreshing instance network info cache due to event network-changed-a1723b81-03da-4080-8d54-cacb839c3f1d. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1452.399915] env[62619]: DEBUG oslo_concurrency.lockutils [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] Acquiring lock "refresh_cache-d4230edc-cfda-4b9f-ab42-2f39c699ff03" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.400068] env[62619]: DEBUG oslo_concurrency.lockutils [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] Acquired lock "refresh_cache-d4230edc-cfda-4b9f-ab42-2f39c699ff03" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.400260] env[62619]: DEBUG nova.network.neutron [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Refreshing network info cache for port a1723b81-03da-4080-8d54-cacb839c3f1d {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1452.499017] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777373, 'name': CreateVM_Task, 'duration_secs': 1.667286} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.500455] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1452.501299] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.501458] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.501797] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1452.502156] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab3a7dbc-0045-459c-875b-fbbe7057b836 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.510431] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1452.510431] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52536bb7-74a9-ea0a-8da4-66ff4966a670" [ 1452.510431] env[62619]: _type = "Task" [ 1452.510431] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.514037] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777376, 'name': ReconfigVM_Task, 'duration_secs': 0.864999} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.517485] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 2a41be15-efaf-4e78-a278-2711cb11e98f/2a41be15-efaf-4e78-a278-2711cb11e98f.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1452.518219] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36a211ed-252b-4adb-a327-d1862c2408db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.526016] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52536bb7-74a9-ea0a-8da4-66ff4966a670, 'name': SearchDatastore_Task, 'duration_secs': 0.010781} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.527305] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.527533] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1452.527758] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.527890] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.528076] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 
tempest-ServersTestMultiNic-518494389-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1452.528378] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1452.528378] env[62619]: value = "task-1777379" [ 1452.528378] env[62619]: _type = "Task" [ 1452.528378] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.528561] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74c15a1b-23bf-444f-8517-9da182f619d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.544194] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777379, 'name': Rename_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.544586] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1452.544787] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1452.548987] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54a9270f-ad46-4efc-8ff7-167cce23ff0b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.551365] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777378, 'name': CreateVM_Task, 'duration_secs': 0.587485} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.551606] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1452.552635] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.552804] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.553170] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1452.553470] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc291df1-050a-4fe7-947f-985ea9da344d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.558698] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1452.558698] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ef493c-f366-ffeb-cd91-86af297f7c27" [ 1452.558698] env[62619]: _type = "Task" [ 1452.558698] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.563998] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Waiting for the task: (returnval){ [ 1452.563998] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b386a0-29c1-c1f3-898e-92dcb0624eba" [ 1452.563998] env[62619]: _type = "Task" [ 1452.563998] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.571014] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ef493c-f366-ffeb-cd91-86af297f7c27, 'name': SearchDatastore_Task, 'duration_secs': 0.011706} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.575053] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5cf19f2-4a89-4ef2-8157-4fbc2ffada22 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.580742] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b386a0-29c1-c1f3-898e-92dcb0624eba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.583840] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1452.583840] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52819b61-4831-c8e0-6efc-5e66e11cfa16" [ 1452.583840] env[62619]: _type = "Task" [ 1452.583840] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.594083] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52819b61-4831-c8e0-6efc-5e66e11cfa16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.624313] env[62619]: DEBUG nova.compute.manager [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1452.773017] env[62619]: DEBUG nova.network.neutron [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1452.779261] env[62619]: DEBUG oslo_concurrency.lockutils [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] Releasing lock "refresh_cache-7c058337-1684-4553-8e96-dd2cd1814a15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.779564] env[62619]: DEBUG nova.compute.manager [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Received event network-vif-deleted-b4a31a23-98c4-445a-8fe4-36fc6013e543 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1452.779797] env[62619]: DEBUG nova.compute.manager [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Received event network-vif-plugged-a1723b81-03da-4080-8d54-cacb839c3f1d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1452.780018] env[62619]: DEBUG oslo_concurrency.lockutils [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] Acquiring lock "d4230edc-cfda-4b9f-ab42-2f39c699ff03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.780295] env[62619]: DEBUG oslo_concurrency.lockutils [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] Lock "d4230edc-cfda-4b9f-ab42-2f39c699ff03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.780501] env[62619]: DEBUG oslo_concurrency.lockutils [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] Lock "d4230edc-cfda-4b9f-ab42-2f39c699ff03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.780693] env[62619]: DEBUG nova.compute.manager [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] No waiting events found dispatching network-vif-plugged-a1723b81-03da-4080-8d54-cacb839c3f1d {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1452.780867] env[62619]: WARNING nova.compute.manager [req-7ac7488e-d41c-48f6-9d2b-78e5e90bd477 req-4c4dfeb1-9fdf-495e-a653-513c2d0417a1 service nova] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Received unexpected event network-vif-plugged-a1723b81-03da-4080-8d54-cacb839c3f1d for instance with vm_state building and task_state spawning. 
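[editor's note] The entries above show the external-event flow: a per-instance events lock is taken, pop_instance_event looks for a registered waiter, and when none exists the manager logs "Received unexpected event ... for instance with vm_state building". A simplified sketch of that dispatch shape, under the assumption of an in-process registry; Nova's real implementation lives in nova.compute.manager and differs in detail:

    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            # (instance_uuid, event_name) -> threading.Event for the waiter.
            self._waiters = {}

        def prepare_for_event(self, instance_uuid, event_name):
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    def external_instance_event(events, instance_uuid, event_name):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            print("WARNING: received unexpected event %s for instance %s"
                  % (event_name, instance_uuid))
        else:
            waiter.set()

    if __name__ == "__main__":
        ev = InstanceEvents()
        # No waiter registered yet -> logged as unexpected, like the WARNING above.
        external_instance_event(ev, "d4230edc", "network-vif-plugged-a1723b81")
        w = ev.prepare_for_event("d4230edc", "network-changed-a1723b81")
        external_instance_event(ev, "d4230edc", "network-changed-a1723b81")
        print("waiter set:", w.is_set())

[end editor's note]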
[ 1452.781847] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc572a1b-a56e-4fcc-9ecb-2d23806756bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.789479] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe864ad5-d6dd-4ce0-91e1-be1c5c15b080 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.829490] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71d3f8d-77b9-4416-b759-892606b92e4e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.837284] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71c1ae1-55ab-4e83-9c6c-6c3416b16155 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.852414] env[62619]: DEBUG nova.compute.provider_tree [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1453.043564] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777379, 'name': Rename_Task, 'duration_secs': 0.169602} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.045121] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1453.045121] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90f0af17-e711-46ff-a195-56c7e89d6082 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.052165] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1453.052165] env[62619]: value = "task-1777380" [ 1453.052165] env[62619]: _type = "Task" [ 1453.052165] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.060769] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777380, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.074913] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b386a0-29c1-c1f3-898e-92dcb0624eba, 'name': SearchDatastore_Task, 'duration_secs': 0.018704} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.077699] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.077818] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1453.078140] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.096502] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52819b61-4831-c8e0-6efc-5e66e11cfa16, 'name': SearchDatastore_Task, 'duration_secs': 0.011197} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.096502] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.096813] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 6be4f813-7171-4515-a728-5cf34665205a/6be4f813-7171-4515-a728-5cf34665205a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1453.096978] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.097166] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1453.097377] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e203f6e-c088-4d45-ba99-34b066655ccc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.099368] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-493a8ed2-0882-4472-9450-0bbde3c603a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.106140] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1453.106140] env[62619]: value = "task-1777381" [ 1453.106140] env[62619]: _type = "Task" [ 1453.106140] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.114345] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1453.114345] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1453.115591] env[62619]: DEBUG nova.network.neutron [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Successfully created port: 649859d4-a599-4566-8f67-2a6e320625c0 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1453.119789] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e9271cb-21d8-4784-9655-e7ed0b9d4263 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.122197] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777381, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.127799] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Waiting for the task: (returnval){ [ 1453.127799] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e008b3-fe60-32f9-37fc-9402b8faf6e4" [ 1453.127799] env[62619]: _type = "Task" [ 1453.127799] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.139896] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e008b3-fe60-32f9-37fc-9402b8faf6e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.166126] env[62619]: DEBUG nova.network.neutron [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Updated VIF entry in instance network info cache for port a1723b81-03da-4080-8d54-cacb839c3f1d. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1453.166126] env[62619]: DEBUG nova.network.neutron [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Updating instance_info_cache with network_info: [{"id": "a1723b81-03da-4080-8d54-cacb839c3f1d", "address": "fa:16:3e:e1:e8:1c", "network": {"id": "286bf2fd-17d0-45ea-a774-8e5dd54931d9", "bridge": "br-int", "label": "tempest-ServersTestJSON-33521734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9b2c21c3d424352a680c5f9660a8d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1723b81-03", "ovs_interfaceid": "a1723b81-03da-4080-8d54-cacb839c3f1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1453.332012] env[62619]: DEBUG nova.network.neutron [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Updating instance_info_cache with network_info: [{"id": "a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1", "address": "fa:16:3e:61:32:a4", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.166", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2b4c0ed-8a", "ovs_interfaceid": "a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1453.356100] env[62619]: DEBUG nova.scheduler.client.report [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1453.563618] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777380, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.618768] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777381, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.640606] env[62619]: DEBUG nova.compute.manager [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1453.643190] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e008b3-fe60-32f9-37fc-9402b8faf6e4, 'name': SearchDatastore_Task, 'duration_secs': 0.012531} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.644378] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93c15de2-5092-4c42-bd55-0f855419b54d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.653613] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Waiting for the task: (returnval){ [ 1453.653613] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5267ade6-2c10-011b-5883-01bc88fb5fe4" [ 1453.653613] env[62619]: _type = "Task" [ 1453.653613] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.663267] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5267ade6-2c10-011b-5883-01bc88fb5fe4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.669085] env[62619]: DEBUG oslo_concurrency.lockutils [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] Releasing lock "refresh_cache-d4230edc-cfda-4b9f-ab42-2f39c699ff03" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.669343] env[62619]: DEBUG nova.compute.manager [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Received event network-vif-plugged-a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1453.669535] env[62619]: DEBUG oslo_concurrency.lockutils [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] Acquiring lock "ef41dd29-1270-4071-9e89-20132131de2d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1453.669732] env[62619]: DEBUG oslo_concurrency.lockutils [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] Lock "ef41dd29-1270-4071-9e89-20132131de2d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1453.669887] env[62619]: DEBUG oslo_concurrency.lockutils [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] Lock "ef41dd29-1270-4071-9e89-20132131de2d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1453.670110] env[62619]: DEBUG nova.compute.manager [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] [instance: ef41dd29-1270-4071-9e89-20132131de2d] No waiting events found dispatching network-vif-plugged-a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1453.670269] env[62619]: WARNING nova.compute.manager [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Received unexpected event network-vif-plugged-a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1 for instance with vm_state building and task_state spawning. [ 1453.670448] env[62619]: DEBUG nova.compute.manager [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Received event network-changed-a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1453.670602] env[62619]: DEBUG nova.compute.manager [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Refreshing instance network info cache due to event network-changed-a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1453.670768] env[62619]: DEBUG oslo_concurrency.lockutils [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] Acquiring lock "refresh_cache-ef41dd29-1270-4071-9e89-20132131de2d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.673347] env[62619]: DEBUG nova.virt.hardware [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1453.673582] env[62619]: DEBUG nova.virt.hardware [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1453.673702] env[62619]: DEBUG nova.virt.hardware [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1453.673810] env[62619]: DEBUG nova.virt.hardware [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1453.674023] env[62619]: DEBUG nova.virt.hardware [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1453.674113] env[62619]: DEBUG nova.virt.hardware [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1453.674304] env[62619]: DEBUG nova.virt.hardware [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1453.674460] env[62619]: DEBUG nova.virt.hardware [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1453.674622] env[62619]: DEBUG nova.virt.hardware [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1453.674775] env[62619]: DEBUG nova.virt.hardware [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1453.674940] env[62619]: DEBUG nova.virt.hardware [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1453.675839] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7a78eb-f8ee-46ad-884a-e05b0b4d1f4f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.685916] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599fbf1a-ba18-4906-a44e-42c61a0a6917 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.836292] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Releasing lock "refresh_cache-ef41dd29-1270-4071-9e89-20132131de2d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.836720] env[62619]: DEBUG nova.compute.manager [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Instance network_info: |[{"id": "a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1", "address": "fa:16:3e:61:32:a4", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.166", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapa2b4c0ed-8a", "ovs_interfaceid": "a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1453.837054] env[62619]: DEBUG oslo_concurrency.lockutils [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] Acquired lock "refresh_cache-ef41dd29-1270-4071-9e89-20132131de2d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.837247] env[62619]: DEBUG nova.network.neutron [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Refreshing network info cache for port a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1453.838475] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:32:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1453.846916] env[62619]: DEBUG oslo.service.loopingcall [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1453.848017] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1453.848256] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c6a0e68-c1e0-44c8-908b-984020c82eda {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.864449] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.251s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1453.866592] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 21.381s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1453.875214] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1453.875214] env[62619]: value = "task-1777382" [ 1453.875214] env[62619]: _type = "Task" [ 1453.875214] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.884340] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777382, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.893284] env[62619]: INFO nova.scheduler.client.report [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Deleted allocations for instance 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41 [ 1454.062392] env[62619]: DEBUG oslo_vmware.api [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777380, 'name': PowerOnVM_Task, 'duration_secs': 0.87602} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.062673] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1454.062867] env[62619]: INFO nova.compute.manager [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Took 9.66 seconds to spawn the instance on the hypervisor. 
[ 1454.063051] env[62619]: DEBUG nova.compute.manager [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1454.063868] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe77b96-0e78-4cc6-975b-0e89dbdf4b2b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.118691] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777381, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.655115} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.118955] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 6be4f813-7171-4515-a728-5cf34665205a/6be4f813-7171-4515-a728-5cf34665205a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1454.119177] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1454.119430] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-edbc38e8-11c3-407c-97a8-320a97137c6f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.125743] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1454.125743] env[62619]: value = "task-1777383" [ 1454.125743] env[62619]: _type = "Task" [ 1454.125743] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.135378] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777383, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.165265] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5267ade6-2c10-011b-5883-01bc88fb5fe4, 'name': SearchDatastore_Task, 'duration_secs': 0.050132} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.165522] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1454.165804] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] d4230edc-cfda-4b9f-ab42-2f39c699ff03/d4230edc-cfda-4b9f-ab42-2f39c699ff03.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1454.166121] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9845d28-bf61-4a24-92ad-ca9f5186b465 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.172512] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Waiting for the task: (returnval){ [ 1454.172512] env[62619]: value = "task-1777384" [ 1454.172512] env[62619]: _type = "Task" [ 1454.172512] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.181234] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777384, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.391283] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777382, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.405296] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4ae5cd73-8e0c-485d-a34f-a26b5da76dd2 tempest-ServersNegativeTestMultiTenantJSON-214579437 tempest-ServersNegativeTestMultiTenantJSON-214579437-project-member] Lock "3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.302s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.583681] env[62619]: INFO nova.compute.manager [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Took 40.28 seconds to build instance. [ 1454.637668] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777383, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080212} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.637955] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1454.639095] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60ec824-d741-4e73-9576-79d619bdd7ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.671879] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 6be4f813-7171-4515-a728-5cf34665205a/6be4f813-7171-4515-a728-5cf34665205a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1454.672520] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c20c5d05-7ca1-486b-bef5-9b3a48fca74a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.698313] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777384, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.699952] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1454.699952] env[62619]: value = "task-1777385" [ 1454.699952] env[62619]: _type = "Task" [ 1454.699952] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.711542] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777385, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.878906] env[62619]: DEBUG nova.network.neutron [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Updated VIF entry in instance network info cache for port a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1454.879225] env[62619]: DEBUG nova.network.neutron [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Updating instance_info_cache with network_info: [{"id": "a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1", "address": "fa:16:3e:61:32:a4", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.166", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2b4c0ed-8a", "ovs_interfaceid": "a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1454.881277] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Applying migration context for instance aa4906f1-e801-4df0-819e-8c5fb5930fb5 as it has an incoming, in-progress migration 610837f7-e212-4892-aec3-980eeb410f02. Migration status is confirming {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1454.884690] env[62619]: INFO nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating resource usage from migration 610837f7-e212-4892-aec3-980eeb410f02 [ 1454.898922] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777382, 'name': CreateVM_Task, 'duration_secs': 0.82863} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.899213] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1454.899870] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1454.900481] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1454.901319] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1454.901603] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0a7f0df-ec89-45ab-a522-227978b5f0e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.909436] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1454.909436] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ebb956-b334-9049-5d14-e44851afb708" [ 1454.909436] env[62619]: _type = "Task" [ 1454.909436] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.914201] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance ac03bcf3-61df-4557-8018-0ad54ef30f17 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.914201] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4763e489-5aeb-4dc0-b327-b79a55afdfe3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.914284] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance ed34ae20-a891-45aa-8124-f36f264937f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.914382] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance fb231b38-950e-4c86-bfe5-4c10a304910f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.914454] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4ee81568-ad9a-4ded-b6fe-15503d85968e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.914583] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 312aed5b-a66e-4428-ac1b-483dc2b38291 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1454.914701] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e9ca5148-f188-4a15-83ae-8f3d730b0dab is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1454.914817] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance d16bebd1-a144-4d73-8eb6-8ab12a08fe69 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1454.914931] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e81c03f7-9c0e-46bd-9641-aced82038eca is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1454.915195] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.915195] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4374c102-a6fe-45ef-ad49-a1295f96899a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1454.915336] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 91ce0ab3-4fa4-4992-995a-0baeec91d9d0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1454.915417] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance b6aae13f-0711-4421-9d55-de7ece3e4b89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.915521] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.915622] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 80363e16-5dd2-42ad-9ead-25b121d62211 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.915734] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 7c058337-1684-4553-8e96-dd2cd1814a15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.916790] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Migration 610837f7-e212-4892-aec3-980eeb410f02 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1454.916790] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance aa4906f1-e801-4df0-819e-8c5fb5930fb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.916790] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 6be4f813-7171-4515-a728-5cf34665205a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.916790] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 2a41be15-efaf-4e78-a278-2711cb11e98f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.916790] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance d4230edc-cfda-4b9f-ab42-2f39c699ff03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.916790] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance ef41dd29-1270-4071-9e89-20132131de2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.916790] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 94c9a119-5c04-4550-b55d-a4a2985385d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1454.923092] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ebb956-b334-9049-5d14-e44851afb708, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.088672] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff193247-a35f-42a4-be8a-4d46f42d9167 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "2a41be15-efaf-4e78-a278-2711cb11e98f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.708s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.127447] env[62619]: DEBUG nova.compute.manager [req-5fdb9719-afc3-49f6-bf94-e4b73c179b5b req-8a91b092-afda-4156-8e5d-22013553b9c5 service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Received event network-changed-9463c863-5c42-4fc9-a0c8-a6c9de3bddcd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1455.127652] env[62619]: DEBUG nova.compute.manager [req-5fdb9719-afc3-49f6-bf94-e4b73c179b5b req-8a91b092-afda-4156-8e5d-22013553b9c5 service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Refreshing instance network info cache due to event network-changed-9463c863-5c42-4fc9-a0c8-a6c9de3bddcd. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1455.127864] env[62619]: DEBUG oslo_concurrency.lockutils [req-5fdb9719-afc3-49f6-bf94-e4b73c179b5b req-8a91b092-afda-4156-8e5d-22013553b9c5 service nova] Acquiring lock "refresh_cache-7c058337-1684-4553-8e96-dd2cd1814a15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.128035] env[62619]: DEBUG oslo_concurrency.lockutils [req-5fdb9719-afc3-49f6-bf94-e4b73c179b5b req-8a91b092-afda-4156-8e5d-22013553b9c5 service nova] Acquired lock "refresh_cache-7c058337-1684-4553-8e96-dd2cd1814a15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.128179] env[62619]: DEBUG nova.network.neutron [req-5fdb9719-afc3-49f6-bf94-e4b73c179b5b req-8a91b092-afda-4156-8e5d-22013553b9c5 service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Refreshing network info cache for port 9463c863-5c42-4fc9-a0c8-a6c9de3bddcd {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1455.201065] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777384, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617928} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.201065] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] d4230edc-cfda-4b9f-ab42-2f39c699ff03/d4230edc-cfda-4b9f-ab42-2f39c699ff03.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1455.201065] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1455.201065] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-79afc429-081f-481d-9b1a-4d577e55b5d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.216523] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777385, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.217920] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Waiting for the task: (returnval){ [ 1455.217920] env[62619]: value = "task-1777386" [ 1455.217920] env[62619]: _type = "Task" [ 1455.217920] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.226505] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777386, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.340618] env[62619]: DEBUG nova.network.neutron [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Successfully updated port: 649859d4-a599-4566-8f67-2a6e320625c0 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1455.390055] env[62619]: DEBUG oslo_concurrency.lockutils [req-9eb17d3e-70f0-4df4-bb84-922a7b20fd5b req-eaa9edbd-dcf1-4b65-bf40-a56187144f69 service nova] Releasing lock "refresh_cache-ef41dd29-1270-4071-9e89-20132131de2d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1455.419614] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 9014ef05-64d1-4bd6-9f2e-db58003b6520 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1455.425946] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ebb956-b334-9049-5d14-e44851afb708, 'name': SearchDatastore_Task, 'duration_secs': 0.016938} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.426639] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1455.426922] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1455.427195] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.430021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.430021] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1455.430021] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3065852-8ffd-4c4a-bc63-7ee35fe848f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.438426] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1455.438591] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1455.439345] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a858ac35-1fe9-461a-b356-9f4319477285 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.445423] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1455.445423] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52961bee-ac30-6d4a-895d-edc6fb5f930b" [ 1455.445423] env[62619]: _type = "Task" [ 1455.445423] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.453960] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52961bee-ac30-6d4a-895d-edc6fb5f930b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.590504] env[62619]: DEBUG nova.compute.manager [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1455.714770] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777385, 'name': ReconfigVM_Task, 'duration_secs': 0.688621} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.715554] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 6be4f813-7171-4515-a728-5cf34665205a/6be4f813-7171-4515-a728-5cf34665205a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1455.716762] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7277f6e-ad9e-4164-b613-2ef900fa6088 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.728441] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777386, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133357} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.732201] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1455.732201] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1455.732201] env[62619]: value = "task-1777387" [ 1455.732201] env[62619]: _type = "Task" [ 1455.732201] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.732201] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b483a607-6aae-41ba-ab0a-dbf9522571b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.745016] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777387, 'name': Rename_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.790888] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] d4230edc-cfda-4b9f-ab42-2f39c699ff03/d4230edc-cfda-4b9f-ab42-2f39c699ff03.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1455.792674] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e8e2a42-3996-45c6-84ba-e947d73784e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.822016] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Waiting for the task: (returnval){ [ 1455.822016] env[62619]: value = "task-1777388" [ 1455.822016] env[62619]: _type = "Task" [ 1455.822016] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.832567] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777388, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.844839] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "refresh_cache-94c9a119-5c04-4550-b55d-a4a2985385d3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.844839] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquired lock "refresh_cache-94c9a119-5c04-4550-b55d-a4a2985385d3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.844989] env[62619]: DEBUG nova.network.neutron [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1455.927869] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 40eeb844-7423-4818-8095-81062c7e6392 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1455.956153] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52961bee-ac30-6d4a-895d-edc6fb5f930b, 'name': SearchDatastore_Task, 'duration_secs': 0.01789} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.956921] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bfe908d-5359-420f-9821-77cce9dc5ece {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.962580] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1455.962580] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52936b02-5fd1-c549-202f-cea988c501c9" [ 1455.962580] env[62619]: _type = "Task" [ 1455.962580] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.972680] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52936b02-5fd1-c549-202f-cea988c501c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.098705] env[62619]: DEBUG nova.network.neutron [req-5fdb9719-afc3-49f6-bf94-e4b73c179b5b req-8a91b092-afda-4156-8e5d-22013553b9c5 service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Updated VIF entry in instance network info cache for port 9463c863-5c42-4fc9-a0c8-a6c9de3bddcd. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1456.099141] env[62619]: DEBUG nova.network.neutron [req-5fdb9719-afc3-49f6-bf94-e4b73c179b5b req-8a91b092-afda-4156-8e5d-22013553b9c5 service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Updating instance_info_cache with network_info: [{"id": "9463c863-5c42-4fc9-a0c8-a6c9de3bddcd", "address": "fa:16:3e:31:14:da", "network": {"id": "f2a947a0-6efb-4fd5-9aa4-f9604a752455", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2058354215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90501fd522094b02a04da8bc54edbcde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9463c863-5c", "ovs_interfaceid": "9463c863-5c42-4fc9-a0c8-a6c9de3bddcd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1456.125749] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1456.246638] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777387, 'name': Rename_Task, 'duration_secs': 0.233417} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.246638] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1456.246811] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-190a386e-5578-46c0-b70d-831039319ef8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.254929] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1456.254929] env[62619]: value = "task-1777389" [ 1456.254929] env[62619]: _type = "Task" [ 1456.254929] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.264390] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777389, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.331972] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777388, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.392783] env[62619]: DEBUG nova.network.neutron [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1456.431393] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 597c0f95-5798-4022-8e2e-89a700698d7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1456.474929] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52936b02-5fd1-c549-202f-cea988c501c9, 'name': SearchDatastore_Task, 'duration_secs': 0.018285} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.474929] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1456.475136] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ef41dd29-1270-4071-9e89-20132131de2d/ef41dd29-1270-4071-9e89-20132131de2d.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1456.475861] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31069751-892e-4dc2-8571-42ce5c47f336 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.482628] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1456.482628] env[62619]: value = "task-1777390" [ 1456.482628] env[62619]: _type = "Task" [ 1456.482628] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.493286] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777390, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.604309] env[62619]: DEBUG oslo_concurrency.lockutils [req-5fdb9719-afc3-49f6-bf94-e4b73c179b5b req-8a91b092-afda-4156-8e5d-22013553b9c5 service nova] Releasing lock "refresh_cache-7c058337-1684-4553-8e96-dd2cd1814a15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1456.652160] env[62619]: DEBUG nova.network.neutron [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Updating instance_info_cache with network_info: [{"id": "649859d4-a599-4566-8f67-2a6e320625c0", "address": "fa:16:3e:45:74:30", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.65", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap649859d4-a5", "ovs_interfaceid": "649859d4-a599-4566-8f67-2a6e320625c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1456.769343] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777389, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.843344] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777388, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.911316] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b6959b-a192-4f7f-b038-ac309e0a8ce5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.921512] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ece9e570-cbf9-4b91-8bd4-e90b1e904837 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Suspending the VM {{(pid=62619) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1456.922112] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d8736508-682f-427f-ab03-e03f1722e3db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.933994] env[62619]: DEBUG oslo_vmware.api [None req-ece9e570-cbf9-4b91-8bd4-e90b1e904837 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1456.933994] env[62619]: value = "task-1777391" [ 1456.933994] env[62619]: _type = "Task" [ 1456.933994] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.939607] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance cef20063-96f0-46cc-9f7d-4436b60216c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1456.949186] env[62619]: DEBUG oslo_vmware.api [None req-ece9e570-cbf9-4b91-8bd4-e90b1e904837 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777391, 'name': SuspendVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.995107] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777390, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.155017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Releasing lock "refresh_cache-94c9a119-5c04-4550-b55d-a4a2985385d3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1457.155497] env[62619]: DEBUG nova.compute.manager [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Instance network_info: |[{"id": "649859d4-a599-4566-8f67-2a6e320625c0", "address": "fa:16:3e:45:74:30", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.65", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap649859d4-a5", "ovs_interfaceid": "649859d4-a599-4566-8f67-2a6e320625c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1457.155939] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:74:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '649859d4-a599-4566-8f67-2a6e320625c0', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1457.169849] env[62619]: DEBUG oslo.service.loopingcall [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1457.170190] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1457.170718] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f573589-9648-430f-b55a-f0232e14d19d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.197269] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1457.197269] env[62619]: value = "task-1777392" [ 1457.197269] env[62619]: _type = "Task" [ 1457.197269] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.210658] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777392, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.268107] env[62619]: DEBUG oslo_vmware.api [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777389, 'name': PowerOnVM_Task, 'duration_secs': 1.004779} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.269671] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1457.270033] env[62619]: INFO nova.compute.manager [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Took 17.88 seconds to spawn the instance on the hypervisor. 
[ 1457.270250] env[62619]: DEBUG nova.compute.manager [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1457.271712] env[62619]: DEBUG nova.compute.manager [req-13564bda-4b52-433f-bda3-c3392738d27e req-c88859ba-8615-491c-8c6e-11984bdf4013 service nova] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Received event network-vif-plugged-649859d4-a599-4566-8f67-2a6e320625c0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1457.271918] env[62619]: DEBUG oslo_concurrency.lockutils [req-13564bda-4b52-433f-bda3-c3392738d27e req-c88859ba-8615-491c-8c6e-11984bdf4013 service nova] Acquiring lock "94c9a119-5c04-4550-b55d-a4a2985385d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1457.272136] env[62619]: DEBUG oslo_concurrency.lockutils [req-13564bda-4b52-433f-bda3-c3392738d27e req-c88859ba-8615-491c-8c6e-11984bdf4013 service nova] Lock "94c9a119-5c04-4550-b55d-a4a2985385d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1457.272349] env[62619]: DEBUG oslo_concurrency.lockutils [req-13564bda-4b52-433f-bda3-c3392738d27e req-c88859ba-8615-491c-8c6e-11984bdf4013 service nova] Lock "94c9a119-5c04-4550-b55d-a4a2985385d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1457.272530] env[62619]: DEBUG nova.compute.manager [req-13564bda-4b52-433f-bda3-c3392738d27e req-c88859ba-8615-491c-8c6e-11984bdf4013 service nova] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] No waiting events found dispatching network-vif-plugged-649859d4-a599-4566-8f67-2a6e320625c0 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1457.272741] env[62619]: WARNING nova.compute.manager [req-13564bda-4b52-433f-bda3-c3392738d27e req-c88859ba-8615-491c-8c6e-11984bdf4013 service nova] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Received unexpected event network-vif-plugged-649859d4-a599-4566-8f67-2a6e320625c0 for instance with vm_state building and task_state spawning. [ 1457.272833] env[62619]: DEBUG nova.compute.manager [req-13564bda-4b52-433f-bda3-c3392738d27e req-c88859ba-8615-491c-8c6e-11984bdf4013 service nova] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Received event network-changed-649859d4-a599-4566-8f67-2a6e320625c0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1457.272980] env[62619]: DEBUG nova.compute.manager [req-13564bda-4b52-433f-bda3-c3392738d27e req-c88859ba-8615-491c-8c6e-11984bdf4013 service nova] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Refreshing instance network info cache due to event network-changed-649859d4-a599-4566-8f67-2a6e320625c0.
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1457.273175] env[62619]: DEBUG oslo_concurrency.lockutils [req-13564bda-4b52-433f-bda3-c3392738d27e req-c88859ba-8615-491c-8c6e-11984bdf4013 service nova] Acquiring lock "refresh_cache-94c9a119-5c04-4550-b55d-a4a2985385d3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1457.273308] env[62619]: DEBUG oslo_concurrency.lockutils [req-13564bda-4b52-433f-bda3-c3392738d27e req-c88859ba-8615-491c-8c6e-11984bdf4013 service nova] Acquired lock "refresh_cache-94c9a119-5c04-4550-b55d-a4a2985385d3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1457.273458] env[62619]: DEBUG nova.network.neutron [req-13564bda-4b52-433f-bda3-c3392738d27e req-c88859ba-8615-491c-8c6e-11984bdf4013 service nova] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Refreshing network info cache for port 649859d4-a599-4566-8f67-2a6e320625c0 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1457.276570] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e0168a-ba57-4aa5-8385-d6ccb05efa4d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.334739] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777388, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.445652] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 060427a2-e724-4c51-879e-675154ae5df2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1457.446135] env[62619]: DEBUG oslo_vmware.api [None req-ece9e570-cbf9-4b91-8bd4-e90b1e904837 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777391, 'name': SuspendVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.494261] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777390, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.684463} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.494553] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ef41dd29-1270-4071-9e89-20132131de2d/ef41dd29-1270-4071-9e89-20132131de2d.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1457.494940] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1457.495247] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1a16f03e-4818-4f64-b07d-af185f1b3de1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.503580] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1457.503580] env[62619]: value = "task-1777393" [ 1457.503580] env[62619]: _type = "Task" [ 1457.503580] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.512579] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777393, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.711948] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777392, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.798238] env[62619]: INFO nova.compute.manager [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Took 52.70 seconds to build instance. [ 1457.834713] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777388, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.950227] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance da806d3f-79f0-4188-a2d8-0beeb9dfec1a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1457.951028] env[62619]: DEBUG oslo_vmware.api [None req-ece9e570-cbf9-4b91-8bd4-e90b1e904837 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777391, 'name': SuspendVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.016967] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777393, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075333} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.019800] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1458.021739] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2134811-2a91-4ed5-8ca4-b7ccb442e35c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.059091] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] ef41dd29-1270-4071-9e89-20132131de2d/ef41dd29-1270-4071-9e89-20132131de2d.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1458.063254] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15bc8964-2555-465f-9ef1-1fd47f468490 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.086458] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1458.086458] env[62619]: value = "task-1777394" [ 1458.086458] env[62619]: _type = "Task" [ 1458.086458] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.096458] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777394, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.213055] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777392, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.245902] env[62619]: DEBUG nova.network.neutron [req-13564bda-4b52-433f-bda3-c3392738d27e req-c88859ba-8615-491c-8c6e-11984bdf4013 service nova] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Updated VIF entry in instance network info cache for port 649859d4-a599-4566-8f67-2a6e320625c0. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1458.246312] env[62619]: DEBUG nova.network.neutron [req-13564bda-4b52-433f-bda3-c3392738d27e req-c88859ba-8615-491c-8c6e-11984bdf4013 service nova] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Updating instance_info_cache with network_info: [{"id": "649859d4-a599-4566-8f67-2a6e320625c0", "address": "fa:16:3e:45:74:30", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.65", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap649859d4-a5", "ovs_interfaceid": "649859d4-a599-4566-8f67-2a6e320625c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1458.260591] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Acquiring lock "7c058337-1684-4553-8e96-dd2cd1814a15" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.260856] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Lock "7c058337-1684-4553-8e96-dd2cd1814a15" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.261071] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Acquiring lock "7c058337-1684-4553-8e96-dd2cd1814a15-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.261257] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf
tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Lock "7c058337-1684-4553-8e96-dd2cd1814a15-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.261420] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Lock "7c058337-1684-4553-8e96-dd2cd1814a15-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.264265] env[62619]: INFO nova.compute.manager [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Terminating instance [ 1458.299768] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d876626c-0677-4c2c-bf58-0bafd8425889 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "6be4f813-7171-4515-a728-5cf34665205a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 77.202s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.336576] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777388, 'name': ReconfigVM_Task, 'duration_secs': 2.449232} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.337272] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Reconfigured VM instance instance-0000001f to attach disk [datastore1] d4230edc-cfda-4b9f-ab42-2f39c699ff03/d4230edc-cfda-4b9f-ab42-2f39c699ff03.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1458.337561] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77e40a61-0746-4baf-9051-48983461577e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.346414] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Waiting for the task: (returnval){ [ 1458.346414] env[62619]: value = "task-1777395" [ 1458.346414] env[62619]: _type = "Task" [ 1458.346414] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.360263] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777395, 'name': Rename_Task} progress is 0%.
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.449528] env[62619]: DEBUG oslo_vmware.api [None req-ece9e570-cbf9-4b91-8bd4-e90b1e904837 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777391, 'name': SuspendVM_Task, 'duration_secs': 1.045514} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.452332] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ece9e570-cbf9-4b91-8bd4-e90b1e904837 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Suspended the VM {{(pid=62619) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1458.452332] env[62619]: DEBUG nova.compute.manager [None req-ece9e570-cbf9-4b91-8bd4-e90b1e904837 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1458.452332] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5beec7-63b3-4e29-b891-0d240da8a66d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.453834] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e32cb991-a018-4b55-8cdf-378e212c8434 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1458.598683] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777394, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.693219] env[62619]: DEBUG oslo_vmware.rw_handles [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525843a5-0ffb-dcc6-05d8-a65823d4ce27/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1458.694303] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7c0fe0-d8c0-4164-a51d-995bf80d883f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.703493] env[62619]: DEBUG oslo_vmware.rw_handles [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525843a5-0ffb-dcc6-05d8-a65823d4ce27/disk-0.vmdk is in state: ready. 
{{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1458.703657] env[62619]: ERROR oslo_vmware.rw_handles [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525843a5-0ffb-dcc6-05d8-a65823d4ce27/disk-0.vmdk due to incomplete transfer. [ 1458.704260] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3361a3ea-d820-46d3-8505-9db7e69ab878 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.709236] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777392, 'name': CreateVM_Task, 'duration_secs': 1.410954} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.709400] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1458.710093] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.710266] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.710570] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1458.710812] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d3a9a33-17d3-4892-853d-c1d0e4397e32 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.713453] env[62619]: DEBUG oslo_vmware.rw_handles [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525843a5-0ffb-dcc6-05d8-a65823d4ce27/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1458.713656] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Uploaded image 59235677-cc77-476d-b578-0801f2854047 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1458.715973] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1458.716541] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-267becb3-8ba1-434f-9bac-7ad3985b0ff4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.719441] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1458.719441] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52510552-08f6-95c2-0ab5-9564259debdf" [ 1458.719441] env[62619]: _type = "Task" [ 1458.719441] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.725581] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1458.725581] env[62619]: value = "task-1777396" [ 1458.725581] env[62619]: _type = "Task" [ 1458.725581] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.732786] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52510552-08f6-95c2-0ab5-9564259debdf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.738163] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777396, 'name': Destroy_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.748807] env[62619]: DEBUG oslo_concurrency.lockutils [req-13564bda-4b52-433f-bda3-c3392738d27e req-c88859ba-8615-491c-8c6e-11984bdf4013 service nova] Releasing lock "refresh_cache-94c9a119-5c04-4550-b55d-a4a2985385d3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1458.768416] env[62619]: DEBUG nova.compute.manager [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1458.768641] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1458.769498] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d221f929-f4a9-4d65-bd96-5366a5a7ebb9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.777903] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1458.778194] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4baf95b4-4cbd-4ed0-be9d-38304b1c5701 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.785475] env[62619]: DEBUG oslo_vmware.api [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Waiting for the task: (returnval){ [ 1458.785475] env[62619]: value = "task-1777397" [ 1458.785475] env[62619]: _type = "Task" [ 1458.785475] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.794860] env[62619]: DEBUG oslo_vmware.api [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777397, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.802905] env[62619]: DEBUG nova.compute.manager [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1458.856958] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777395, 'name': Rename_Task, 'duration_secs': 0.248362} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.857542] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1458.857806] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3dc5d5e2-1f24-45f1-86d0-4ed67bbf649d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.864471] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Waiting for the task: (returnval){ [ 1458.864471] env[62619]: value = "task-1777398" [ 1458.864471] env[62619]: _type = "Task" [ 1458.864471] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.873976] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777398, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.957195] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1459.099113] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777394, 'name': ReconfigVM_Task, 'duration_secs': 0.58524} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.099397] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Reconfigured VM instance instance-00000020 to attach disk [datastore1] ef41dd29-1270-4071-9e89-20132131de2d/ef41dd29-1270-4071-9e89-20132131de2d.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1459.100067] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09ebd233-2680-4717-8288-d3da0c9cc15d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.107400] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1459.107400] env[62619]: value = "task-1777399" [ 1459.107400] env[62619]: _type = "Task" [ 1459.107400] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.115914] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777399, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.230823] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52510552-08f6-95c2-0ab5-9564259debdf, 'name': SearchDatastore_Task, 'duration_secs': 0.014834} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.236070] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.237170] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1459.237170] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.237170] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.237170] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1459.237170] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e02d1fde-1a97-48a7-a33a-9dbfbdae65ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.245050] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777396, 'name': Destroy_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.249063] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1459.249063] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1459.249063] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e893216-1dac-49d7-a402-5fa0f73d24b3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.255018] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1459.255018] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e18d62-977b-a8a3-0963-59400ebb1223" [ 1459.255018] env[62619]: _type = "Task" [ 1459.255018] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.263512] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e18d62-977b-a8a3-0963-59400ebb1223, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.298235] env[62619]: DEBUG oslo_vmware.api [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777397, 'name': PowerOffVM_Task, 'duration_secs': 0.259208} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.298235] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1459.298591] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1459.298978] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb2978f4-8690-4cb3-bd78-c48248e9dbe1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.326931] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.375607] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777398, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.436720] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "6be4f813-7171-4515-a728-5cf34665205a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.436945] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "6be4f813-7171-4515-a728-5cf34665205a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.437180] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "6be4f813-7171-4515-a728-5cf34665205a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.437366] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "6be4f813-7171-4515-a728-5cf34665205a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.437566] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "6be4f813-7171-4515-a728-5cf34665205a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.439959] env[62619]: INFO nova.compute.manager [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Terminating instance [ 1459.460456] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 7217d898-54ee-46ed-88fa-959c38e988e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1459.547574] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1459.547907] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1459.548256] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Deleting the datastore file [datastore1] 7c058337-1684-4553-8e96-dd2cd1814a15 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1459.548529] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71621de3-a979-449b-a2a8-6e6dbe61e836 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.555954] env[62619]: DEBUG oslo_vmware.api [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Waiting for the task: (returnval){ [ 1459.555954] env[62619]: value = "task-1777401" [ 1459.555954] env[62619]: _type = "Task" [ 1459.555954] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.565318] env[62619]: DEBUG oslo_vmware.api [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777401, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.617614] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777399, 'name': Rename_Task, 'duration_secs': 0.255796} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.617900] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1459.618153] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2c35051-c2ab-4197-97a1-05f837b52a93 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.625474] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1459.625474] env[62619]: value = "task-1777402" [ 1459.625474] env[62619]: _type = "Task" [ 1459.625474] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.635197] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777402, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.743037] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777396, 'name': Destroy_Task, 'duration_secs': 0.634346} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.743361] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Destroyed the VM [ 1459.743721] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1459.743983] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c4bb87af-45d1-4344-b9b9-f6c64fdb305d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.752802] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1459.752802] env[62619]: value = "task-1777403" [ 1459.752802] env[62619]: _type = "Task" [ 1459.752802] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.764672] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777403, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.768179] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e18d62-977b-a8a3-0963-59400ebb1223, 'name': SearchDatastore_Task, 'duration_secs': 0.024394} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.768925] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bdf7073-f4a6-4060-90f6-f88872c03a80 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.774952] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1459.774952] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523fd289-152d-9c1c-ca4c-e873d818a25b" [ 1459.774952] env[62619]: _type = "Task" [ 1459.774952] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.784096] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523fd289-152d-9c1c-ca4c-e873d818a25b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.878917] env[62619]: DEBUG oslo_vmware.api [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777398, 'name': PowerOnVM_Task, 'duration_secs': 0.716735} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.879262] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1459.879510] env[62619]: INFO nova.compute.manager [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Took 12.78 seconds to spawn the instance on the hypervisor. 
[ 1459.879702] env[62619]: DEBUG nova.compute.manager [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1459.880542] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7a4f64-333f-4e57-8bc8-88db0457701e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.945226] env[62619]: DEBUG nova.compute.manager [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1459.945226] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1459.945494] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f961e7-9483-41f1-aca8-ec87c82cff2b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.957700] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1459.957700] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e43c6d2-4c03-42eb-8739-abfe61d98c68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.963349] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance a802534f-1766-4ea9-9188-803ef197d775 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1459.966771] env[62619]: DEBUG oslo_vmware.api [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1459.966771] env[62619]: value = "task-1777404" [ 1459.966771] env[62619]: _type = "Task" [ 1459.966771] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.978578] env[62619]: DEBUG oslo_vmware.api [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777404, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.070615] env[62619]: DEBUG oslo_vmware.api [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Task: {'id': task-1777401, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268384} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.070970] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1460.070970] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1460.070970] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1460.070970] env[62619]: INFO nova.compute.manager [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1460.072014] env[62619]: DEBUG oslo.service.loopingcall [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1460.072775] env[62619]: DEBUG nova.compute.manager [-] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1460.072775] env[62619]: DEBUG nova.network.neutron [-] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1460.138513] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777402, 'name': PowerOnVM_Task} progress is 89%.
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.266661] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777403, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.297044] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523fd289-152d-9c1c-ca4c-e873d818a25b, 'name': SearchDatastore_Task, 'duration_secs': 0.011082} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.297044] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1460.297044] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 94c9a119-5c04-4550-b55d-a4a2985385d3/94c9a119-5c04-4550-b55d-a4a2985385d3.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1460.297044] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-517c98e9-9661-4cf1-ab8a-50ff0850e61e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.302021] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1460.302021] env[62619]: value = "task-1777405" [ 1460.302021] env[62619]: _type = "Task" [ 1460.302021] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.311179] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777405, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.405467] env[62619]: INFO nova.compute.manager [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Took 40.57 seconds to build instance. 
[ 1460.468262] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance ca5f5f6b-5303-4af4-adaa-e4aac72a90f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1460.487993] env[62619]: DEBUG oslo_vmware.api [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777404, 'name': PowerOffVM_Task, 'duration_secs': 0.208228} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.488386] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1460.488667] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1460.489120] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e92df2f6-0975-47cc-8fb0-28f8941d022b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.639373] env[62619]: DEBUG oslo_vmware.api [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777402, 'name': PowerOnVM_Task, 'duration_secs': 0.906364} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.639657] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1460.639890] env[62619]: INFO nova.compute.manager [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Took 10.85 seconds to spawn the instance on the hypervisor. 
[ 1460.640079] env[62619]: DEBUG nova.compute.manager [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1460.641146] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95df897b-194a-4767-b3d1-c3bd165c42f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.743820] env[62619]: DEBUG nova.compute.manager [req-b9d8f5b6-37d2-44b8-a3ce-0d592571bf65 req-631079e8-1404-48ac-b388-bb43dc0ad685 service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Received event network-vif-deleted-9463c863-5c42-4fc9-a0c8-a6c9de3bddcd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1460.744202] env[62619]: INFO nova.compute.manager [req-b9d8f5b6-37d2-44b8-a3ce-0d592571bf65 req-631079e8-1404-48ac-b388-bb43dc0ad685 service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Neutron deleted interface 9463c863-5c42-4fc9-a0c8-a6c9de3bddcd; detaching it from the instance and deleting it from the info cache [ 1460.744465] env[62619]: DEBUG nova.network.neutron [req-b9d8f5b6-37d2-44b8-a3ce-0d592571bf65 req-631079e8-1404-48ac-b388-bb43dc0ad685 service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.766566] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777403, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.777207] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1460.777534] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1460.777814] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Deleting the datastore file [datastore1] 6be4f813-7171-4515-a728-5cf34665205a {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1460.778629] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8bde11ae-b39e-45f0-8b12-3450a7d7f02c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.789173] env[62619]: DEBUG oslo_vmware.api [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1460.789173] env[62619]: value = "task-1777407" [ 1460.789173] env[62619]: _type = "Task" [ 1460.789173] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.800034] env[62619]: DEBUG oslo_vmware.api [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777407, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.815605] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777405, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.907929] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e241c148-2bc9-4997-bb54-28fb406e0088 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Lock "d4230edc-cfda-4b9f-ab42-2f39c699ff03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 76.421s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.982049] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 78c7a111-d497-4114-b4f4-07319e6e7df2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1461.014389] env[62619]: DEBUG nova.network.neutron [-] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.096401] env[62619]: DEBUG nova.compute.manager [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1461.097458] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e818d803-8e3c-4190-8cdd-916e341c2f16 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.167453] env[62619]: INFO nova.compute.manager [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Took 38.65 seconds to build instance. [ 1461.247255] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e4ebce6-24ca-4544-a8c8-1b1d98db580b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.261960] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dead8f3-4b8d-43ef-af72-e189942376d0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.279762] env[62619]: DEBUG oslo_vmware.api [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777403, 'name': RemoveSnapshot_Task, 'duration_secs': 1.287532} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.279981] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1461.280221] env[62619]: INFO nova.compute.manager [None req-f7424d3a-67e8-4cf7-8146-a89f6825ebba tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Took 16.91 seconds to snapshot the instance on the hypervisor. [ 1461.296000] env[62619]: DEBUG nova.compute.manager [req-b9d8f5b6-37d2-44b8-a3ce-0d592571bf65 req-631079e8-1404-48ac-b388-bb43dc0ad685 service nova] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Detach interface failed, port_id=9463c863-5c42-4fc9-a0c8-a6c9de3bddcd, reason: Instance 7c058337-1684-4553-8e96-dd2cd1814a15 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1461.305559] env[62619]: DEBUG oslo_vmware.api [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777407, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.262177} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.308804] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1461.308991] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1461.309177] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1461.309341] env[62619]: INFO nova.compute.manager [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Took 1.36 seconds to destroy the instance on the hypervisor. [ 1461.309566] env[62619]: DEBUG oslo.service.loopingcall [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1461.310386] env[62619]: DEBUG nova.compute.manager [-] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1461.311077] env[62619]: DEBUG nova.network.neutron [-] [instance: 6be4f813-7171-4515-a728-5cf34665205a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1461.318717] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777405, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579412} completed successfully.
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.318717] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 94c9a119-5c04-4550-b55d-a4a2985385d3/94c9a119-5c04-4550-b55d-a4a2985385d3.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1461.318717] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1461.318717] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a15f520-159e-436b-b9bd-aa5a8b8a3831 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.327131] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1461.327131] env[62619]: value = "task-1777408" [ 1461.327131] env[62619]: _type = "Task" [ 1461.327131] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.340410] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777408, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.410868] env[62619]: DEBUG nova.compute.manager [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1461.483979] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance cd8b8828-79cf-4a7c-b018-b8bd745aaa45 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1461.518774] env[62619]: INFO nova.compute.manager [-] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Took 1.45 seconds to deallocate network for instance. 
[ 1461.609411] env[62619]: INFO nova.compute.manager [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] instance snapshotting [ 1461.609651] env[62619]: WARNING nova.compute.manager [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1461.613198] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747b1702-7ccf-48df-8e51-96a310c43908 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.640603] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b42b71e-458b-44a1-b22b-7b47206eb45e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.669313] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2035c252-cc14-450a-9167-6715291cec4b tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Lock "ef41dd29-1270-4071-9e89-20132131de2d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 76.268s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.754614] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquiring lock "ef41dd29-1270-4071-9e89-20132131de2d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.754898] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Lock "ef41dd29-1270-4071-9e89-20132131de2d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.755123] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquiring lock "ef41dd29-1270-4071-9e89-20132131de2d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.755307] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Lock "ef41dd29-1270-4071-9e89-20132131de2d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.755472] env[62619]: DEBUG oslo_concurrency.lockutils [None 
req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Lock "ef41dd29-1270-4071-9e89-20132131de2d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.758467] env[62619]: INFO nova.compute.manager [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Terminating instance [ 1461.837970] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777408, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.162025} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.839080] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1461.839145] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d423dcd4-7562-448d-af8f-4759e4f65e3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.864192] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 94c9a119-5c04-4550-b55d-a4a2985385d3/94c9a119-5c04-4550-b55d-a4a2985385d3.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1461.864548] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a9030ae-c7bc-4acf-b2fa-48f953ab13c4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.889185] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1461.889185] env[62619]: value = "task-1777409" [ 1461.889185] env[62619]: _type = "Task" [ 1461.889185] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.901490] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777409, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.948487] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.987906] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 39adf15c-f77e-4737-aeeb-258887007b9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1461.989175] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1461.989175] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1462.026334] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.156023] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1462.156023] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7c3d81a3-f549-4ebe-92e2-923c599b66e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.168176] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1462.168176] env[62619]: value = "task-1777410" [ 1462.168176] env[62619]: _type = "Task" [ 1462.168176] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.180027] env[62619]: DEBUG nova.compute.manager [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1462.182592] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777410, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.262067] env[62619]: DEBUG nova.compute.manager [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1462.262555] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1462.263895] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81623166-b662-4393-acb5-ce95203c7b7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.274311] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1462.275017] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53f315e7-e991-4a74-9fc6-cce662cafe3a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.284439] env[62619]: DEBUG oslo_vmware.api [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1462.284439] env[62619]: value = "task-1777411" [ 1462.284439] env[62619]: _type = "Task" [ 1462.284439] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.302042] env[62619]: DEBUG oslo_vmware.api [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777411, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.336728] env[62619]: DEBUG nova.compute.manager [req-8ea759ae-21d5-4734-a525-6433d00fb1e0 req-ac8157f2-c8fa-4ebf-870c-0dce0f0bade3 service nova] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Received event network-changed-a1723b81-03da-4080-8d54-cacb839c3f1d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1462.337021] env[62619]: DEBUG nova.compute.manager [req-8ea759ae-21d5-4734-a525-6433d00fb1e0 req-ac8157f2-c8fa-4ebf-870c-0dce0f0bade3 service nova] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Refreshing instance network info cache due to event network-changed-a1723b81-03da-4080-8d54-cacb839c3f1d. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1462.337328] env[62619]: DEBUG oslo_concurrency.lockutils [req-8ea759ae-21d5-4734-a525-6433d00fb1e0 req-ac8157f2-c8fa-4ebf-870c-0dce0f0bade3 service nova] Acquiring lock "refresh_cache-d4230edc-cfda-4b9f-ab42-2f39c699ff03" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1462.337515] env[62619]: DEBUG oslo_concurrency.lockutils [req-8ea759ae-21d5-4734-a525-6433d00fb1e0 req-ac8157f2-c8fa-4ebf-870c-0dce0f0bade3 service nova] Acquired lock "refresh_cache-d4230edc-cfda-4b9f-ab42-2f39c699ff03" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.337740] env[62619]: DEBUG nova.network.neutron [req-8ea759ae-21d5-4734-a525-6433d00fb1e0 req-ac8157f2-c8fa-4ebf-870c-0dce0f0bade3 service nova] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Refreshing network info cache for port a1723b81-03da-4080-8d54-cacb839c3f1d {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1462.402175] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777409, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.646536] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc888ef4-b8e1-4a0b-8c74-0a519ea16f0f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.655403] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf0c2f8-5678-441e-9090-911b6bfa020a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.689056] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6e4ce8-1868-444e-8496-4e52dd7a8246 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.699648] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777410, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.702457] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4285b3cc-ba19-4c63-8720-7453894dab46 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.708924] env[62619]: DEBUG oslo_concurrency.lockutils [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.716852] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1462.724370] env[62619]: DEBUG nova.network.neutron [-] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.797233] env[62619]: DEBUG oslo_vmware.api [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777411, 'name': PowerOffVM_Task, 'duration_secs': 0.262438} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.797513] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1462.797681] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1462.797928] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca2ca013-fd95-4094-9ab8-063ddad81b2a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.874510] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1462.874796] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1462.876263] env[62619]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Deleting the datastore file [datastore1] ef41dd29-1270-4071-9e89-20132131de2d {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1462.876263] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ced36bdc-baca-405c-9ce2-ec800bb07d03 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.883194] env[62619]: DEBUG oslo_vmware.api [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for the task: (returnval){ [ 1462.883194] env[62619]: value = "task-1777413" [ 1462.883194] env[62619]: _type = "Task" [ 1462.883194] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.885398] env[62619]: DEBUG nova.compute.manager [req-3a6d451b-7646-4731-afbc-3f363a4c19d5 req-76947b8e-9e06-4572-b23b-f9346ebc2d60 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Received event network-vif-deleted-467d9eda-baf0-4ee1-b652-587781aeb8e3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1462.885573] env[62619]: DEBUG nova.compute.manager [req-3a6d451b-7646-4731-afbc-3f363a4c19d5 req-76947b8e-9e06-4572-b23b-f9346ebc2d60 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Received event network-vif-deleted-fc867071-21de-47e5-b245-cdd32ff75559 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1462.885631] env[62619]: DEBUG nova.compute.manager [req-3a6d451b-7646-4731-afbc-3f363a4c19d5 req-76947b8e-9e06-4572-b23b-f9346ebc2d60 service nova] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Received event network-vif-deleted-bfe09361-c134-4a13-a07f-d903524c0546 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1462.897872] env[62619]: DEBUG oslo_vmware.api [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777413, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.903290] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777409, 'name': ReconfigVM_Task, 'duration_secs': 0.707161} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.903549] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 94c9a119-5c04-4550-b55d-a4a2985385d3/94c9a119-5c04-4550-b55d-a4a2985385d3.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1462.904800] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72ab16b5-d58c-4fe4-8dd0-49696073f617 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.911115] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1462.911115] env[62619]: value = "task-1777414" [ 1462.911115] env[62619]: _type = "Task" [ 1462.911115] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.919986] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777414, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.155395] env[62619]: DEBUG nova.network.neutron [req-8ea759ae-21d5-4734-a525-6433d00fb1e0 req-ac8157f2-c8fa-4ebf-870c-0dce0f0bade3 service nova] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Updated VIF entry in instance network info cache for port a1723b81-03da-4080-8d54-cacb839c3f1d. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1463.155821] env[62619]: DEBUG nova.network.neutron [req-8ea759ae-21d5-4734-a525-6433d00fb1e0 req-ac8157f2-c8fa-4ebf-870c-0dce0f0bade3 service nova] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Updating instance_info_cache with network_info: [{"id": "a1723b81-03da-4080-8d54-cacb839c3f1d", "address": "fa:16:3e:e1:e8:1c", "network": {"id": "286bf2fd-17d0-45ea-a774-8e5dd54931d9", "bridge": "br-int", "label": "tempest-ServersTestJSON-33521734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9b2c21c3d424352a680c5f9660a8d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1723b81-03", "ovs_interfaceid": "a1723b81-03da-4080-8d54-cacb839c3f1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1463.196111] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777410, 'name': CreateSnapshot_Task, 'duration_secs': 0.675639} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.196404] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1463.197427] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d7fc6b-2212-497e-9016-4f81bae9a8d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.220394] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1463.227077] env[62619]: INFO nova.compute.manager [-] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Took 1.92 seconds to deallocate network for instance. 
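The network_info entry cached above for port a1723b81-03da-4080-8d54-cacb839c3f1d is an ordinary JSON-style structure: one VIF whose network carries subnets, each subnet its fixed IPs, and each fixed IP any attached floating IPs. The short Python sketch below is illustrative only; it is trimmed to the fields visible in that log line (values copied from it) and does not use Nova's own NetworkInfo model. It simply walks the structure to reach the fixed address 192.168.128.11 and its floating address 10.180.180.192.

```python
# Illustrative walk over one cached network_info VIF entry, with the values
# taken from the log line above. Plain dict handling; no Nova imports assumed.
vif = {
    "id": "a1723b81-03da-4080-8d54-cacb839c3f1d",
    "address": "fa:16:3e:e1:e8:1c",
    "devname": "tapa1723b81-03",
    "type": "ovs",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.11",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.192", "type": "floating"}],
            }],
        }],
    },
}

for subnet in vif["network"]["subnets"]:
    for ip in subnet["ips"]:
        print(f'{vif["devname"]}: fixed {ip["address"]} in {subnet["cidr"]}')
        for fip in ip.get("floating_ips", []):
            print(f'  floating {fip["address"]} -> fixed {ip["address"]}')
```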
[ 1463.308017] env[62619]: DEBUG nova.compute.manager [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1463.308967] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7b31b1-2e47-498b-8197-ab3ed593ca8d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.393617] env[62619]: DEBUG oslo_vmware.api [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Task: {'id': task-1777413, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166647} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.393854] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1463.394045] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1463.394217] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1463.394383] env[62619]: INFO nova.compute.manager [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1463.394614] env[62619]: DEBUG oslo.service.loopingcall [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1463.394790] env[62619]: DEBUG nova.compute.manager [-] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1463.394882] env[62619]: DEBUG nova.network.neutron [-] [instance: ef41dd29-1270-4071-9e89-20132131de2d] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1463.420504] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777414, 'name': Rename_Task, 'duration_secs': 0.208331} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.421282] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1463.421282] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76a2d40c-cc7f-4514-b362-74a2211f977f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.428935] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1463.428935] env[62619]: value = "task-1777415" [ 1463.428935] env[62619]: _type = "Task" [ 1463.428935] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.437064] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777415, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.658586] env[62619]: DEBUG oslo_concurrency.lockutils [req-8ea759ae-21d5-4734-a525-6433d00fb1e0 req-ac8157f2-c8fa-4ebf-870c-0dce0f0bade3 service nova] Releasing lock "refresh_cache-d4230edc-cfda-4b9f-ab42-2f39c699ff03" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.716630] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1463.717083] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0b3f1f1d-ef39-4c46-898e-4dfd2e779ffb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.724908] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1463.725357] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.859s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.725534] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.967s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.727241] env[62619]: INFO nova.compute.claims [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1463.731243] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.731888] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Cleaning up deleted instances {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11609}} [ 1463.735810] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.735896] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 
tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1463.735896] env[62619]: value = "task-1777416" [ 1463.735896] env[62619]: _type = "Task" [ 1463.735896] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.746175] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777416, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.820571] env[62619]: INFO nova.compute.manager [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] instance snapshotting [ 1463.825513] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4e7c27-aa9e-4cba-aee6-a77001b4dca5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.848501] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0a1a58-33ad-4194-8721-29c9be1e9be8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.940406] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777415, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.157591] env[62619]: DEBUG nova.network.neutron [-] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1464.241302] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] There are 11 instances to clean {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11618}} [ 1464.241671] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 3e7d0ba8-1c7c-4e23-a30a-506e9e26cb41] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1464.258978] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777416, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.361304] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1464.361663] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7bc01c36-93e0-432c-a0c0-0837a89ecdbe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.370308] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1464.370308] env[62619]: value = "task-1777417" [ 1464.370308] env[62619]: _type = "Task" [ 1464.370308] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.379874] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777417, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.439473] env[62619]: DEBUG oslo_vmware.api [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777415, 'name': PowerOnVM_Task, 'duration_secs': 0.914165} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.439738] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1464.439938] env[62619]: INFO nova.compute.manager [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Took 10.80 seconds to spawn the instance on the hypervisor. [ 1464.440131] env[62619]: DEBUG nova.compute.manager [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1464.440974] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a874825-683b-4318-adaa-ea2b7cd6818c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.661508] env[62619]: INFO nova.compute.manager [-] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Took 1.27 seconds to deallocate network for instance. 
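Every vCenter operation in this trace (PowerOffVM_Task, DeleteDatastoreFile_Task, Rename_Task, PowerOnVM_Task, CreateSnapshot_Task, CloneVM_Task) follows the same wait_for_task/_poll_task pattern from oslo_vmware/api.py: the task reference is returned immediately, and its info is then re-read on a fixed interval (via the PropertyCollector.RetrievePropertiesEx calls interleaved above) until the state is success or error, which produces the repeated "progress is N%." lines and the final duration_secs. The sketch below is a schematic of that loop, not the oslo.vmware implementation; get_task_info is a hypothetical stand-in for the property read, and the state names are simplified.

```python
import time

def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
    """Schematic of the poll loop behind the 'progress is N%' lines above.

    get_task_info is a hypothetical callable returning an object with
    .state ('running' / 'success' / 'error'), .progress and .error fields;
    the real driver instead reads the task's 'info' property through the
    vSphere PropertyCollector on each pass of a looping call.
    """
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info                      # completion is logged with duration_secs
        if info.state == "error":
            raise RuntimeError(info.error)   # surfaced as a task failure
        # corresponds to the repeated "Task: {...} progress is N%." DEBUG lines
        print(f"Task {task_ref} progress is {info.progress or 0}%")
        time.sleep(poll_interval)
```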
[ 1464.745116] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: d7b2d831-b2ae-445c-887b-290171ae5d80] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1464.763133] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777416, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.885554] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777417, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.944550] env[62619]: DEBUG nova.compute.manager [req-5310fba5-a580-4e94-bf2d-313e51c41973 req-74dd715c-43e0-4dea-ba21-7ae9de57b036 service nova] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Received event network-vif-deleted-a2b4c0ed-8a0f-463d-aa08-4ee89f0d45d1 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1464.962021] env[62619]: INFO nova.compute.manager [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Took 33.58 seconds to build instance. [ 1465.167418] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.231360] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6460f2c-2ced-4c2a-b765-38e3161590c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.239789] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306baa91-4797-4e6e-89cd-8cc73fd6bcf6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.272718] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: f46de981-1f04-4baf-874c-de1b95d16f9d] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1465.278153] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1006aac-8ddb-4efe-860e-fd12eedc92ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.281659] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777416, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.287072] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39c263f-a57c-4c66-b946-f9e4f3f0d463 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.301219] env[62619]: DEBUG nova.compute.provider_tree [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1465.383620] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777417, 'name': CreateSnapshot_Task, 'duration_secs': 0.647488} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.383854] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1465.384594] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90094bdf-721d-4200-aabc-fbe6788ae1a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.464257] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f491c8f1-a6cb-47f5-9f47-f4fe144e19af tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "94c9a119-5c04-4550-b55d-a4a2985385d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.175s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.756237] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777416, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.782966] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 1847c5d8-16eb-4feb-8a09-24ad6728e59c] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1465.804299] env[62619]: DEBUG nova.scheduler.client.report [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1465.902168] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1465.902564] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-717cebfb-b1a1-4ae2-8d67-e68e175f0eaf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.913258] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1465.913258] env[62619]: value = "task-1777418" [ 1465.913258] env[62619]: _type = "Task" [ 1465.913258] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.921821] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777418, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.967064] env[62619]: DEBUG nova.compute.manager [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1466.067468] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea09250-68e2-4315-ac52-eaf8378dbd95 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.074924] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-db77eeed-84b9-403e-adca-19c11e130511 tempest-ServersAdminNegativeTestJSON-1022281618 tempest-ServersAdminNegativeTestJSON-1022281618-project-admin] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Suspending the VM {{(pid=62619) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1466.075215] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-0a3f44d2-7b04-4388-a14b-738167e0113f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.083771] env[62619]: DEBUG oslo_vmware.api [None req-db77eeed-84b9-403e-adca-19c11e130511 tempest-ServersAdminNegativeTestJSON-1022281618 tempest-ServersAdminNegativeTestJSON-1022281618-project-admin] Waiting for the task: (returnval){ [ 1466.083771] env[62619]: value = "task-1777419" [ 1466.083771] env[62619]: _type = "Task" [ 1466.083771] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.092592] env[62619]: DEBUG oslo_vmware.api [None req-db77eeed-84b9-403e-adca-19c11e130511 tempest-ServersAdminNegativeTestJSON-1022281618 tempest-ServersAdminNegativeTestJSON-1022281618-project-admin] Task: {'id': task-1777419, 'name': SuspendVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.258776] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777416, 'name': CloneVM_Task, 'duration_secs': 2.045756} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.259176] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Created linked-clone VM from snapshot [ 1466.260186] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceea151a-d518-49e9-8c25-c2f39d41d7e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.270204] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Uploading image 54bf1189-d033-4bcf-8e21-ca87dc7cdba3 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1466.285938] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e0e25ddd-3692-480f-bfa0-212741c0d882] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1466.296414] env[62619]: DEBUG oslo_vmware.rw_handles [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1466.296414] env[62619]: value = "vm-368966" [ 1466.296414] env[62619]: _type = "VirtualMachine" [ 1466.296414] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1466.296744] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-763235a8-0f24-4578-ad5d-9eeb6fae4704 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.306611] env[62619]: DEBUG oslo_vmware.rw_handles [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lease: (returnval){ [ 1466.306611] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528feeb0-2f49-012d-57a7-98b011960545" [ 1466.306611] env[62619]: _type = "HttpNfcLease" [ 1466.306611] env[62619]: } obtained for exporting VM: (result){ [ 1466.306611] env[62619]: value = "vm-368966" [ 1466.306611] env[62619]: _type = "VirtualMachine" [ 1466.306611] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1466.306994] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the lease: (returnval){ [ 1466.306994] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528feeb0-2f49-012d-57a7-98b011960545" [ 1466.306994] env[62619]: _type = "HttpNfcLease" [ 1466.306994] env[62619]: } to be ready. 
{{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1466.311798] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.312368] env[62619]: DEBUG nova.compute.manager [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1466.315095] env[62619]: DEBUG oslo_concurrency.lockutils [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.924s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.315393] env[62619]: DEBUG oslo_concurrency.lockutils [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.317249] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 31.504s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.323794] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1466.323794] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528feeb0-2f49-012d-57a7-98b011960545" [ 1466.323794] env[62619]: _type = "HttpNfcLease" [ 1466.323794] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1466.336178] env[62619]: INFO nova.scheduler.client.report [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Deleted allocations for instance d16bebd1-a144-4d73-8eb6-8ab12a08fe69 [ 1466.424076] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777418, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.486413] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.595418] env[62619]: DEBUG oslo_vmware.api [None req-db77eeed-84b9-403e-adca-19c11e130511 tempest-ServersAdminNegativeTestJSON-1022281618 tempest-ServersAdminNegativeTestJSON-1022281618-project-admin] Task: {'id': task-1777419, 'name': SuspendVM_Task} progress is 62%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.791187] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 28a8485c-fc0d-4fd0-8be9-37c49caf89b8] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1466.817311] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1466.817311] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528feeb0-2f49-012d-57a7-98b011960545" [ 1466.817311] env[62619]: _type = "HttpNfcLease" [ 1466.817311] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1466.817606] env[62619]: DEBUG oslo_vmware.rw_handles [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1466.817606] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528feeb0-2f49-012d-57a7-98b011960545" [ 1466.817606] env[62619]: _type = "HttpNfcLease" [ 1466.817606] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1466.818422] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8065bf3e-1a6b-44dd-bee8-17c4cec29356 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.824494] env[62619]: DEBUG nova.compute.utils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1466.829473] env[62619]: DEBUG nova.compute.manager [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1466.829628] env[62619]: DEBUG nova.network.neutron [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1466.846032] env[62619]: DEBUG oslo_vmware.rw_handles [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529a797d-fc7b-5ef1-10e9-c78f5ef455b6/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1466.846032] env[62619]: DEBUG oslo_vmware.rw_handles [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529a797d-fc7b-5ef1-10e9-c78f5ef455b6/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1466.847757] env[62619]: DEBUG oslo_concurrency.lockutils [None req-240748fa-a569-4821-8559-d5b5f6b923de tempest-ServerRescueTestJSONUnderV235-454006046 tempest-ServerRescueTestJSONUnderV235-454006046-project-member] Lock "d16bebd1-a144-4d73-8eb6-8ab12a08fe69" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.600s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.914373] env[62619]: DEBUG nova.policy [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7de19beaddcd46718c91b9367b64981e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '333f40a8350d4a4586cd2236bc63bef9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1466.928567] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777418, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.968228] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-52630594-2165-4528-976d-9bf05f31a295 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.098788] env[62619]: DEBUG oslo_vmware.api [None req-db77eeed-84b9-403e-adca-19c11e130511 tempest-ServersAdminNegativeTestJSON-1022281618 tempest-ServersAdminNegativeTestJSON-1022281618-project-admin] Task: {'id': task-1777419, 'name': SuspendVM_Task, 'duration_secs': 0.770112} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.099090] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-db77eeed-84b9-403e-adca-19c11e130511 tempest-ServersAdminNegativeTestJSON-1022281618 tempest-ServersAdminNegativeTestJSON-1022281618-project-admin] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Suspended the VM {{(pid=62619) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1467.099319] env[62619]: DEBUG nova.compute.manager [None req-db77eeed-84b9-403e-adca-19c11e130511 tempest-ServersAdminNegativeTestJSON-1022281618 tempest-ServersAdminNegativeTestJSON-1022281618-project-admin] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1467.100159] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be74b02d-76d5-4b96-a3ca-bc069a63e828 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.296095] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e98120b4-7916-4ce4-88ef-0c904852bb1f] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1467.327177] env[62619]: DEBUG nova.compute.manager [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1467.422519] env[62619]: DEBUG nova.network.neutron [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Successfully created port: 13b7e2bb-07ca-4faa-aa62-69635847b2f7 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1467.436936] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777418, 'name': CloneVM_Task, 'duration_secs': 1.386753} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.437442] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Created linked-clone VM from snapshot [ 1467.438288] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68949e72-5f80-4cb4-a7cd-5e7c04b0fdda {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.448242] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Uploading image 6426eba5-d6e6-4449-b7b9-2ef0e724dd63 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1467.476189] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1467.476189] env[62619]: value = "vm-368968" [ 1467.476189] env[62619]: _type = "VirtualMachine" [ 1467.476189] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1467.477223] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3433ed80-ec41-4924-8163-df404d52a3d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.486210] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lease: (returnval){ [ 1467.486210] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d75105-a602-9f31-0adc-901829f63359" [ 1467.486210] env[62619]: _type = "HttpNfcLease" [ 1467.486210] env[62619]: } obtained for exporting VM: (result){ [ 1467.486210] env[62619]: value = "vm-368968" [ 1467.486210] env[62619]: _type = "VirtualMachine" [ 1467.486210] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1467.486699] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the lease: (returnval){ [ 1467.486699] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d75105-a602-9f31-0adc-901829f63359" [ 1467.486699] env[62619]: _type = "HttpNfcLease" [ 1467.486699] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1467.497856] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1467.497856] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d75105-a602-9f31-0adc-901829f63359" [ 1467.497856] env[62619]: _type = "HttpNfcLease" [ 1467.497856] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1467.503818] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87681873-cf6c-47a5-b5c3-8644d2aea2e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.514438] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13096fe5-a65d-48b3-9610-35c6ca391558 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.555527] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a252ae53-cb94-4182-ac34-dae122b68cb2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.565793] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0dd0eaa-ffa6-41e5-8c4b-88403911ca3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.581723] env[62619]: DEBUG nova.compute.provider_tree [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1467.800219] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 4f08d36b-f26e-499e-a4be-d8cbb481a44d] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1467.999223] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1467.999223] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d75105-a602-9f31-0adc-901829f63359" [ 1467.999223] env[62619]: _type = "HttpNfcLease" [ 1467.999223] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1467.999640] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1467.999640] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d75105-a602-9f31-0adc-901829f63359" [ 1467.999640] env[62619]: _type = "HttpNfcLease" [ 1467.999640] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1468.000536] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3e0da0-b7e5-4e07-b241-80af6cbbc380 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.009200] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52301c7c-58f0-783c-c36e-b48d9ce28ab4/disk-0.vmdk from lease info. 
{{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1468.009458] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52301c7c-58f0-783c-c36e-b48d9ce28ab4/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1468.089022] env[62619]: DEBUG nova.scheduler.client.report [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1468.099342] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b150e959-6d46-4230-b56c-d4655a729959 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.307953] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: b334cb41-5ddf-4545-8e2a-97c4d1de7cbf] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1468.340312] env[62619]: DEBUG nova.compute.manager [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1468.375168] env[62619]: DEBUG nova.virt.hardware [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1468.375168] env[62619]: DEBUG nova.virt.hardware [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1468.375168] env[62619]: DEBUG nova.virt.hardware [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1468.378617] env[62619]: DEBUG nova.virt.hardware [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1468.378837] env[62619]: DEBUG nova.virt.hardware [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1468.378997] env[62619]: DEBUG nova.virt.hardware [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1468.379230] env[62619]: DEBUG nova.virt.hardware [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1468.379404] env[62619]: DEBUG nova.virt.hardware [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1468.379569] env[62619]: DEBUG 
nova.virt.hardware [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1468.379716] env[62619]: DEBUG nova.virt.hardware [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1468.379879] env[62619]: DEBUG nova.virt.hardware [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1468.380824] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e53348-53ee-4445-9d7c-6eb2fa4e3a72 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.391828] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2185645e-6a3d-43c1-85b3-4c0cd4568d5e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.810824] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: aa576459-65bf-4b16-ad1d-0930497522eb] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1469.099595] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.782s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.103243] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.349s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.108950] env[62619]: INFO nova.compute.claims [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1469.315442] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 08c1fef9-40fc-4420-91de-fe911dea70f7] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1469.688390] env[62619]: INFO nova.scheduler.client.report [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Deleted allocation for migration 
610837f7-e212-4892-aec3-980eeb410f02 [ 1469.819685] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.819890] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Cleaning up deleted instances with incomplete migration {{(pid=62619) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11647}} [ 1469.902682] env[62619]: DEBUG nova.network.neutron [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Successfully updated port: 13b7e2bb-07ca-4faa-aa62-69635847b2f7 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1470.198641] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c45ffd69-6ecd-40f4-a863-31f229c04b44 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 39.420s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.323339] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1470.404975] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "refresh_cache-9014ef05-64d1-4bd6-9f2e-db58003b6520" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.405171] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquired lock "refresh_cache-9014ef05-64d1-4bd6-9f2e-db58003b6520" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.405335] env[62619]: DEBUG nova.network.neutron [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1470.589654] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ca7685-dbb1-4a4f-9781-528967776c29 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.600229] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c048e99b-59d1-4530-9c75-2c41d1c8dc8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.633101] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a16f5774-632a-4c76-a824-e54299465507 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.641746] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94434df7-c724-4cd3-af34-309e16e5b5be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.656450] env[62619]: DEBUG nova.compute.provider_tree [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1470.938483] env[62619]: DEBUG nova.network.neutron [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1471.092017] env[62619]: DEBUG nova.network.neutron [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Updating instance_info_cache with network_info: [{"id": "13b7e2bb-07ca-4faa-aa62-69635847b2f7", "address": "fa:16:3e:07:1c:ed", "network": {"id": "d28ddb7a-9db3-465f-8343-8f23b12b5183", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2033557069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "333f40a8350d4a4586cd2236bc63bef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13b7e2bb-07", "ovs_interfaceid": "13b7e2bb-07ca-4faa-aa62-69635847b2f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1471.159467] env[62619]: DEBUG nova.scheduler.client.report [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1471.595412] env[62619]: DEBUG 
oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Releasing lock "refresh_cache-9014ef05-64d1-4bd6-9f2e-db58003b6520" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.595783] env[62619]: DEBUG nova.compute.manager [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Instance network_info: |[{"id": "13b7e2bb-07ca-4faa-aa62-69635847b2f7", "address": "fa:16:3e:07:1c:ed", "network": {"id": "d28ddb7a-9db3-465f-8343-8f23b12b5183", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2033557069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "333f40a8350d4a4586cd2236bc63bef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13b7e2bb-07", "ovs_interfaceid": "13b7e2bb-07ca-4faa-aa62-69635847b2f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1471.596306] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:1c:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7ab8d568-adb0-4f3b-b6cc-68413e6546ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13b7e2bb-07ca-4faa-aa62-69635847b2f7', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1471.604799] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Creating folder: Project (333f40a8350d4a4586cd2236bc63bef9). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1471.605206] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3339c60b-f700-4738-b2b4-8e5abde0bed9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.618106] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Created folder: Project (333f40a8350d4a4586cd2236bc63bef9) in parent group-v368875. 
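[editor's note] The records around this point (Folder.CreateFolder for the "Project (…)" and "Instances" folders, then Folder.CreateVM_Task followed by the wait_for_task/_poll_task polling) follow the usual oslo.vmware session pattern. The sketch below is illustrative only and is not the Nova source: the host, credentials, and the moref/spec arguments are assumed placeholders, and only `VMwareAPISession`, `invoke_api`, and `wait_for_task` are taken as known oslo.vmware API.

```python
# Illustrative sketch (assumptions noted above), mirroring the
# CreateFolder -> CreateVM_Task -> wait_for_task sequence in the log.
from oslo_vmware import api as vmware_api


def create_instance_vm(host, user, password, parent_folder_ref,
                       vm_config_spec, res_pool_ref):
    # One authenticated vCenter session, analogous to the one the
    # compute driver holds for the whole run.
    session = vmware_api.VMwareAPISession(
        host, user, password, api_retry_count=10, task_poll_interval=0.5)

    # "Invoking Folder.CreateFolder" is a plain VIM call on the parent
    # folder moref; it returns the new folder moref directly.
    instances_folder = session.invoke_api(
        session.vim, 'CreateFolder', parent_folder_ref, name='Instances')

    # "Invoking Folder.CreateVM_Task" returns a task moref, which is then
    # polled until completion -- the wait_for_task/_poll_task records above
    # and below show exactly that polling loop.
    task = session.invoke_api(
        session.vim, 'CreateVM_Task', instances_folder,
        config=vm_config_spec, pool=res_pool_ref, host=None)
    return session.wait_for_task(task)
```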
[ 1471.618324] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Creating folder: Instances. Parent ref: group-v368969. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1471.618596] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9e707a1-cae9-4acc-ba56-c7dbe673478b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.630298] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Created folder: Instances in parent group-v368969. [ 1471.630636] env[62619]: DEBUG oslo.service.loopingcall [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1471.630872] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1471.631143] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f58967c-fbe2-44c6-9c80-14e56500cbff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.652974] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1471.652974] env[62619]: value = "task-1777424" [ 1471.652974] env[62619]: _type = "Task" [ 1471.652974] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.665510] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.666041] env[62619]: DEBUG nova.compute.manager [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1471.668978] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.378s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.671106] env[62619]: DEBUG nova.objects.instance [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1471.678015] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777424, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.166936] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777424, 'name': CreateVM_Task, 'duration_secs': 0.457844} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.166936] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1472.166936] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1472.166936] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1472.166936] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1472.166936] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b6fed72-2e88-45d4-9094-ba112397eb67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.174035] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1472.174035] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52593ed0-587f-3709-ca86-bad447466f8b" [ 1472.174035] env[62619]: _type = "Task" [ 1472.174035] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.183048] env[62619]: DEBUG nova.compute.utils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1472.185257] env[62619]: DEBUG nova.compute.manager [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1472.185478] env[62619]: DEBUG nova.network.neutron [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1472.195432] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52593ed0-587f-3709-ca86-bad447466f8b, 'name': SearchDatastore_Task, 'duration_secs': 0.014926} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.195722] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1472.195952] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1472.196185] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1472.196328] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1472.196503] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1472.196767] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c590b7d1-51ce-4af0-8174-57d3bf7dc9e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.208151] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1472.208340] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1472.209115] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef65f663-cb62-4491-8e99-06448997e6d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.216112] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1472.216112] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bcfdca-7e47-98cc-ec33-4ece2d2eeeaa" [ 1472.216112] env[62619]: _type = "Task" [ 1472.216112] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.225517] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bcfdca-7e47-98cc-ec33-4ece2d2eeeaa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.246973] env[62619]: DEBUG nova.policy [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef6ad59bb4f040a58582ebdbc1393204', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8eb3cd57aa0c47798c99d55dbdf98126', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1472.531778] env[62619]: DEBUG nova.network.neutron [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Successfully created port: 5c068735-1d99-4f86-a405-99a38588ba2b {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1472.683830] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8aa7862-46d9-444a-a4ab-dcc1a5fc12e9 tempest-ServersAdmin275Test-1073091372 tempest-ServersAdmin275Test-1073091372-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1472.689570] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.130s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1472.689570] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.004s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1472.693381] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.696s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1472.693381] env[62619]: INFO nova.compute.claims [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1472.697749] env[62619]: DEBUG nova.compute.manager [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1472.730611] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bcfdca-7e47-98cc-ec33-4ece2d2eeeaa, 'name': SearchDatastore_Task, 'duration_secs': 0.014822} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.730783] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5e7be85-05a1-4fb5-88e4-b9cb93f18302 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.737685] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1472.737685] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ef15af-2246-d399-1acb-691e5a379c76" [ 1472.737685] env[62619]: _type = "Task" [ 1472.737685] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.748365] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ef15af-2246-d399-1acb-691e5a379c76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.749600] env[62619]: INFO nova.scheduler.client.report [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Deleted allocations for instance 4374c102-a6fe-45ef-ad49-a1295f96899a [ 1473.251585] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ef15af-2246-d399-1acb-691e5a379c76, 'name': SearchDatastore_Task, 'duration_secs': 0.015088} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.251866] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1473.252238] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 9014ef05-64d1-4bd6-9f2e-db58003b6520/9014ef05-64d1-4bd6-9f2e-db58003b6520.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1473.252630] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e934dad4-b193-49bd-a0b4-99c4b6463445 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.258739] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8576b23-21d9-47b0-8f16-c29a48a603fc tempest-ServerTagsTestJSON-2115815453 tempest-ServerTagsTestJSON-2115815453-project-member] Lock "4374c102-a6fe-45ef-ad49-a1295f96899a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.067s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.266424] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1473.266424] env[62619]: value = "task-1777425" [ 1473.266424] env[62619]: _type = "Task" [ 1473.266424] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.277117] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777425, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.713136] env[62619]: DEBUG nova.compute.manager [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1473.778600] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777425, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.072814] env[62619]: DEBUG nova.network.neutron [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Successfully updated port: 5c068735-1d99-4f86-a405-99a38588ba2b {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1474.163928] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f60fa7c-1edd-4f06-8372-6fbc107cef88 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.173772] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9de9d8e-7392-4533-a178-c6e9512400a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.221478] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908fb9ac-fab0-4ddd-9cd4-3f768129f635 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.231356] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bef1721-2368-471d-bf29-5b88adab647f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.254631] env[62619]: DEBUG nova.compute.provider_tree [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1474.294430] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777425, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.663188} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.295123] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 9014ef05-64d1-4bd6-9f2e-db58003b6520/9014ef05-64d1-4bd6-9f2e-db58003b6520.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1474.295387] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1474.296247] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3b50a5c1-6bd7-4f00-9f92-fd4ee10b635c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.305713] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1474.305713] env[62619]: value = "task-1777426" [ 1474.305713] env[62619]: _type = "Task" [ 1474.305713] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.323451] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777426, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.439703] env[62619]: DEBUG nova.virt.hardware [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1474.440654] env[62619]: DEBUG nova.virt.hardware [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1474.440654] env[62619]: DEBUG nova.virt.hardware [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1474.440654] env[62619]: DEBUG nova.virt.hardware [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1474.440654] env[62619]: DEBUG nova.virt.hardware [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1474.440654] env[62619]: DEBUG nova.virt.hardware [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1474.440880] env[62619]: DEBUG nova.virt.hardware [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1474.440915] env[62619]: DEBUG nova.virt.hardware [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1474.441097] env[62619]: DEBUG 
nova.virt.hardware [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1474.441260] env[62619]: DEBUG nova.virt.hardware [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1474.441431] env[62619]: DEBUG nova.virt.hardware [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1474.443805] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5a2a55-b057-4445-a79f-c1278beacb14 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.456730] env[62619]: DEBUG oslo_vmware.rw_handles [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529a797d-fc7b-5ef1-10e9-c78f5ef455b6/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1474.458224] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8991f15a-b017-48ca-9863-21635a929d7e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.462845] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db58ec63-f534-483c-81bc-c1a05af61fb1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.469751] env[62619]: DEBUG oslo_vmware.rw_handles [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529a797d-fc7b-5ef1-10e9-c78f5ef455b6/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1474.469921] env[62619]: ERROR oslo_vmware.rw_handles [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529a797d-fc7b-5ef1-10e9-c78f5ef455b6/disk-0.vmdk due to incomplete transfer. [ 1474.477855] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8e1944d2-14f8-46a9-86ae-72b7c530554d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.487389] env[62619]: DEBUG oslo_vmware.rw_handles [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529a797d-fc7b-5ef1-10e9-c78f5ef455b6/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1474.487389] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Uploaded image 54bf1189-d033-4bcf-8e21-ca87dc7cdba3 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1474.489411] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1474.489688] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-94c5c9ff-2a29-4260-856a-cf2f0b6201e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.497563] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1474.497563] env[62619]: value = "task-1777427" [ 1474.497563] env[62619]: _type = "Task" [ 1474.497563] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.507053] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777427, 'name': Destroy_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.576992] env[62619]: DEBUG nova.compute.manager [req-a6dc293d-f03c-47da-93ea-6f4a1db7f4e1 req-1e2cb7de-50b4-483d-8127-cae55819ca7a service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Received event network-vif-plugged-13b7e2bb-07ca-4faa-aa62-69635847b2f7 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1474.576992] env[62619]: DEBUG oslo_concurrency.lockutils [req-a6dc293d-f03c-47da-93ea-6f4a1db7f4e1 req-1e2cb7de-50b4-483d-8127-cae55819ca7a service nova] Acquiring lock "9014ef05-64d1-4bd6-9f2e-db58003b6520-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.577553] env[62619]: DEBUG oslo_concurrency.lockutils [req-a6dc293d-f03c-47da-93ea-6f4a1db7f4e1 req-1e2cb7de-50b4-483d-8127-cae55819ca7a service nova] Lock "9014ef05-64d1-4bd6-9f2e-db58003b6520-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.577740] env[62619]: DEBUG oslo_concurrency.lockutils [req-a6dc293d-f03c-47da-93ea-6f4a1db7f4e1 req-1e2cb7de-50b4-483d-8127-cae55819ca7a service nova] Lock "9014ef05-64d1-4bd6-9f2e-db58003b6520-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.579142] env[62619]: DEBUG nova.compute.manager [req-a6dc293d-f03c-47da-93ea-6f4a1db7f4e1 req-1e2cb7de-50b4-483d-8127-cae55819ca7a service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] No waiting events found dispatching network-vif-plugged-13b7e2bb-07ca-4faa-aa62-69635847b2f7 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1474.579142] env[62619]: WARNING nova.compute.manager [req-a6dc293d-f03c-47da-93ea-6f4a1db7f4e1 req-1e2cb7de-50b4-483d-8127-cae55819ca7a service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Received unexpected event network-vif-plugged-13b7e2bb-07ca-4faa-aa62-69635847b2f7 for instance with vm_state building and task_state spawning. 
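[editor's note] The "Acquiring lock" / "acquired" / "released" records threaded through this section (the "<instance-uuid>-events" lock for external events, the "refresh_cache-<instance-uuid>" lock for the network info cache, the "compute_resources" lock in the resource tracker) come from oslo.concurrency's lock helpers. The sketch below is illustrative only; the lock names mirror the log, everything else is an assumed placeholder, and only `lockutils.synchronized` and `lockutils.lock` are taken as known oslo.concurrency API.

```python
# Illustrative sketch (assumptions noted above) of the two lock idioms
# that produce the DEBUG lines from oslo_concurrency/lockutils.py.
from oslo_concurrency import lockutils


@lockutils.synchronized('9014ef05-64d1-4bd6-9f2e-db58003b6520-events')
def pop_instance_event(event_name):
    # The decorator alone emits the "acquired by ... waited" and
    # '"released" by ... held' records seen for the "-events" lock.
    return event_name


def refresh_network_cache(instance_uuid, refresh):
    # The context-manager form matches the explicit Acquiring/Acquired/
    # Releasing records for "refresh_cache-<instance-uuid>".
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return refresh(instance_uuid)
```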
[ 1474.579142] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Acquiring lock "refresh_cache-40eeb844-7423-4818-8095-81062c7e6392" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.579142] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Acquired lock "refresh_cache-40eeb844-7423-4818-8095-81062c7e6392" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.579698] env[62619]: DEBUG nova.network.neutron [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1474.758083] env[62619]: DEBUG nova.scheduler.client.report [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1474.818480] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1474.819118] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777426, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.163732} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.819449] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1474.819774] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1474.821213] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1474.822135] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05892fd2-212e-4c3d-87f6-f88f6a8e0d27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.848209] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] 9014ef05-64d1-4bd6-9f2e-db58003b6520/9014ef05-64d1-4bd6-9f2e-db58003b6520.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1474.849183] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-665a46c9-130b-4982-8e22-6933a52eaca1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.876120] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1474.876120] env[62619]: value = "task-1777428" [ 1474.876120] env[62619]: _type = "Task" [ 1474.876120] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.882658] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777428, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.009625] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777427, 'name': Destroy_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.164675] env[62619]: DEBUG nova.network.neutron [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1475.264186] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.572s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.264786] env[62619]: DEBUG nova.compute.manager [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1475.267971] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.965s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.269489] env[62619]: INFO nova.compute.claims [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1475.386345] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777428, 'name': ReconfigVM_Task, 'duration_secs': 0.336567} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.386604] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Reconfigured VM instance instance-00000022 to attach disk [datastore1] 9014ef05-64d1-4bd6-9f2e-db58003b6520/9014ef05-64d1-4bd6-9f2e-db58003b6520.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1475.387218] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a34ae9fc-40eb-46b8-abfd-da59fa884f5c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.395559] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1475.395559] env[62619]: value = "task-1777429" [ 1475.395559] env[62619]: _type = "Task" [ 1475.395559] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.405729] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777429, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.414301] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "refresh_cache-ac03bcf3-61df-4557-8018-0ad54ef30f17" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1475.414725] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquired lock "refresh_cache-ac03bcf3-61df-4557-8018-0ad54ef30f17" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.415035] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Forcefully refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1475.514380] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777427, 'name': Destroy_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.565363] env[62619]: DEBUG nova.network.neutron [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Updating instance_info_cache with network_info: [{"id": "5c068735-1d99-4f86-a405-99a38588ba2b", "address": "fa:16:3e:49:d7:a2", "network": {"id": "a2de4aa2-b0a7-4709-bd23-0f0a97e6683e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1358443003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8eb3cd57aa0c47798c99d55dbdf98126", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c068735-1d", "ovs_interfaceid": "5c068735-1d99-4f86-a405-99a38588ba2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.774392] env[62619]: DEBUG nova.compute.utils [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 1475.778544] env[62619]: DEBUG nova.compute.manager [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Not allocating networking since 'none' was specified. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1475.907289] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777429, 'name': Rename_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.017827] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777427, 'name': Destroy_Task, 'duration_secs': 1.434602} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.018860] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Destroyed the VM [ 1476.019372] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1476.019729] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c644d14c-7639-4799-aaf0-5d8e45cae7d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.030073] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1476.030073] env[62619]: value = "task-1777430" [ 1476.030073] env[62619]: _type = "Task" [ 1476.030073] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.041345] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777430, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.072735] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52301c7c-58f0-783c-c36e-b48d9ce28ab4/disk-0.vmdk. 
{{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1476.072735] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Releasing lock "refresh_cache-40eeb844-7423-4818-8095-81062c7e6392" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.072735] env[62619]: DEBUG nova.compute.manager [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Instance network_info: |[{"id": "5c068735-1d99-4f86-a405-99a38588ba2b", "address": "fa:16:3e:49:d7:a2", "network": {"id": "a2de4aa2-b0a7-4709-bd23-0f0a97e6683e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1358443003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8eb3cd57aa0c47798c99d55dbdf98126", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c068735-1d", "ovs_interfaceid": "5c068735-1d99-4f86-a405-99a38588ba2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1476.074027] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8678d532-799f-4074-bb47-be6a30431ff0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.077767] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:d7:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c894ab55-c869-4530-9702-cb46d173ce94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c068735-1d99-4f86-a405-99a38588ba2b', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1476.085054] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Creating folder: Project (8eb3cd57aa0c47798c99d55dbdf98126). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1476.085693] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-765bacae-3741-4091-8bde-3954509559a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.094983] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52301c7c-58f0-783c-c36e-b48d9ce28ab4/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1476.095320] env[62619]: ERROR oslo_vmware.rw_handles [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52301c7c-58f0-783c-c36e-b48d9ce28ab4/disk-0.vmdk due to incomplete transfer. [ 1476.095560] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-41c13b19-738d-4f18-8324-74c9d620159a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.106749] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Created folder: Project (8eb3cd57aa0c47798c99d55dbdf98126) in parent group-v368875. [ 1476.106749] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Creating folder: Instances. Parent ref: group-v368972. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1476.106889] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52301c7c-58f0-783c-c36e-b48d9ce28ab4/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1476.108830] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Uploaded image 6426eba5-d6e6-4449-b7b9-2ef0e724dd63 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1476.110279] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1476.112841] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b162b2e-156d-4302-a342-cd7fc97c7ef6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.115335] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9f6a4ff9-fa8d-4339-a819-6c1f62c6a84e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.124036] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1476.124036] env[62619]: value = "task-1777433" [ 1476.124036] env[62619]: _type = "Task" [ 1476.124036] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.131055] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Created folder: Instances in parent group-v368972. [ 1476.131458] env[62619]: DEBUG oslo.service.loopingcall [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1476.133075] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1476.134371] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4ce5cfa-d8a9-4e52-a113-e18aab2d3383 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.156706] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777433, 'name': Destroy_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.164472] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1476.164472] env[62619]: value = "task-1777434" [ 1476.164472] env[62619]: _type = "Task" [ 1476.164472] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.179133] env[62619]: DEBUG oslo_concurrency.lockutils [None req-667f8000-b1be-4d20-84a6-8d0f926215cc tempest-ServersListShow296Test-20887118 tempest-ServersListShow296Test-20887118-project-member] Acquiring lock "5232c3cc-29eb-43e0-91e3-763b778c3183" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.179133] env[62619]: DEBUG oslo_concurrency.lockutils [None req-667f8000-b1be-4d20-84a6-8d0f926215cc tempest-ServersListShow296Test-20887118 tempest-ServersListShow296Test-20887118-project-member] Lock "5232c3cc-29eb-43e0-91e3-763b778c3183" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.187274] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777434, 'name': CreateVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.281848] env[62619]: DEBUG nova.compute.manager [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1476.409980] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777429, 'name': Rename_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.445902] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "94c9a119-5c04-4550-b55d-a4a2985385d3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.448609] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "94c9a119-5c04-4550-b55d-a4a2985385d3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.449219] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "94c9a119-5c04-4550-b55d-a4a2985385d3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.449364] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "94c9a119-5c04-4550-b55d-a4a2985385d3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.449611] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "94c9a119-5c04-4550-b55d-a4a2985385d3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.454541] env[62619]: INFO nova.compute.manager [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Terminating instance [ 1476.543196] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777430, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.593675] env[62619]: DEBUG oslo_concurrency.lockutils [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "2a41be15-efaf-4e78-a278-2711cb11e98f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.594066] env[62619]: DEBUG oslo_concurrency.lockutils [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "2a41be15-efaf-4e78-a278-2711cb11e98f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.594275] env[62619]: DEBUG oslo_concurrency.lockutils [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "2a41be15-efaf-4e78-a278-2711cb11e98f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.595136] env[62619]: DEBUG oslo_concurrency.lockutils [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "2a41be15-efaf-4e78-a278-2711cb11e98f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.595136] env[62619]: DEBUG oslo_concurrency.lockutils [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "2a41be15-efaf-4e78-a278-2711cb11e98f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.597923] env[62619]: INFO nova.compute.manager [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Terminating instance [ 1476.642600] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777433, 'name': Destroy_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.684423] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777434, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.858059] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Updating instance_info_cache with network_info: [{"id": "b6faf342-2332-4eee-bdde-dafce4f0a856", "address": "fa:16:3e:b8:19:39", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6faf342-23", "ovs_interfaceid": "b6faf342-2332-4eee-bdde-dafce4f0a856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.887196] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa021418-3640-4f36-ba4b-f445ae1ac009 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.895054] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ccc568-b2c4-49f0-86c4-d8b017ff30e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.908379] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777429, 'name': Rename_Task, 'duration_secs': 1.248725} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.935305] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1476.936301] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cdc9f287-f82b-4f0a-9b2c-3573e2487fb1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.938571] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006f2531-9835-4fee-8f67-a4d7fd7598ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.949571] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9aa8061-2ae5-445d-9c84-6d469218e03c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.953646] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1476.953646] env[62619]: value = "task-1777435" [ 1476.953646] env[62619]: _type = "Task" [ 1476.953646] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.965479] env[62619]: DEBUG nova.compute.manager [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1476.965672] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1476.966201] env[62619]: DEBUG nova.compute.provider_tree [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1476.968349] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192835de-b7b5-4e28-b229-9c11719fe19d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.975579] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777435, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.981355] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1476.982106] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4f050e1-43ad-4f03-8695-e62ec8dfbfd2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.041825] env[62619]: DEBUG oslo_vmware.api [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777430, 'name': RemoveSnapshot_Task, 'duration_secs': 0.88169} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.043045] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1477.043654] env[62619]: INFO nova.compute.manager [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Took 15.43 seconds to snapshot the instance on the hypervisor. [ 1477.048592] env[62619]: DEBUG nova.compute.manager [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Received event network-changed-13b7e2bb-07ca-4faa-aa62-69635847b2f7 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1477.049009] env[62619]: DEBUG nova.compute.manager [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Refreshing instance network info cache due to event network-changed-13b7e2bb-07ca-4faa-aa62-69635847b2f7. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1477.049009] env[62619]: DEBUG oslo_concurrency.lockutils [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] Acquiring lock "refresh_cache-9014ef05-64d1-4bd6-9f2e-db58003b6520" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.049147] env[62619]: DEBUG oslo_concurrency.lockutils [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] Acquired lock "refresh_cache-9014ef05-64d1-4bd6-9f2e-db58003b6520" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.049317] env[62619]: DEBUG nova.network.neutron [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Refreshing network info cache for port 13b7e2bb-07ca-4faa-aa62-69635847b2f7 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1477.108914] env[62619]: DEBUG nova.compute.manager [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1477.109219] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1477.110131] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782800f9-47cd-44a9-97ed-abc4bc983f30 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.119739] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1477.119996] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63b21bef-fef1-41f0-9b7e-769419cb1d92 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.135167] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777433, 'name': Destroy_Task, 'duration_secs': 0.583992} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.135446] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Destroyed the VM [ 1477.135757] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1477.135935] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-84b34990-d18a-4ea0-a03b-55555c1a91a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.143975] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1477.143975] env[62619]: value = "task-1777438" [ 1477.143975] env[62619]: _type = "Task" [ 1477.143975] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.154678] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777438, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.175805] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777434, 'name': CreateVM_Task, 'duration_secs': 0.564772} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.175805] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1477.176465] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.176630] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.176961] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1477.177275] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd8ddba9-7c3f-4366-bb23-0df849681e99 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.183853] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Waiting for the task: (returnval){ [ 1477.183853] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fec5d1-afa8-d1f7-831e-fae4ffa8d1d3" [ 1477.183853] env[62619]: _type = "Task" [ 1477.183853] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.194500] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fec5d1-afa8-d1f7-831e-fae4ffa8d1d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.302657] env[62619]: DEBUG nova.compute.manager [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1477.331154] env[62619]: DEBUG nova.virt.hardware [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1477.331154] env[62619]: DEBUG nova.virt.hardware [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1477.331154] env[62619]: DEBUG nova.virt.hardware [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1477.331154] env[62619]: DEBUG nova.virt.hardware [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1477.331154] env[62619]: DEBUG nova.virt.hardware [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1477.331559] env[62619]: DEBUG nova.virt.hardware [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1477.331908] env[62619]: DEBUG nova.virt.hardware [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1477.332269] env[62619]: DEBUG nova.virt.hardware [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1477.332646] env[62619]: DEBUG nova.virt.hardware [None 
req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1477.333114] env[62619]: DEBUG nova.virt.hardware [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1477.333425] env[62619]: DEBUG nova.virt.hardware [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1477.335474] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50be6c79-1598-42ff-be52-cd5544d30598 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.346119] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4990d9e2-719f-4a85-910f-c19782c1afa6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.365166] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Releasing lock "refresh_cache-ac03bcf3-61df-4557-8018-0ad54ef30f17" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.365166] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 1477.365166] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1477.370975] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Creating folder: Project (bc490b2d66a84667a375034115b77c8a). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1477.371338] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1477.377032] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d464095-363f-437f-a330-fc782f0768f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.377032] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1477.377032] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1477.377032] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1477.377032] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1477.377363] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1477.377363] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1477.377363] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1477.386825] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Created folder: Project (bc490b2d66a84667a375034115b77c8a) in parent group-v368875. [ 1477.387214] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Creating folder: Instances. Parent ref: group-v368975. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1477.387403] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-500d9a71-a1a5-494b-b108-6efd2d42ca63 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.398299] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Created folder: Instances in parent group-v368975. [ 1477.398573] env[62619]: DEBUG oslo.service.loopingcall [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1477.398766] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1477.400496] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4f4e2c9-01d1-4eb0-88c5-9e1a9c55c844 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.420277] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1477.420277] env[62619]: value = "task-1777441" [ 1477.420277] env[62619]: _type = "Task" [ 1477.420277] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.436141] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777441, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.464666] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777435, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.476714] env[62619]: DEBUG nova.scheduler.client.report [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1477.551468] env[62619]: DEBUG nova.compute.manager [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Instance disappeared during snapshot {{(pid=62619) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1477.565927] env[62619]: DEBUG nova.compute.manager [None req-ef5101ec-339c-4a57-9d26-02e74d0ca56e tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Image not found during clean up 54bf1189-d033-4bcf-8e21-ca87dc7cdba3 {{(pid=62619) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 1477.651888] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "eca829be-d425-4668-9ebd-1247c5ff19d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.652279] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "eca829be-d425-4668-9ebd-1247c5ff19d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.674390] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777438, 'name': RemoveSnapshot_Task} progress is 30%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.699212] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fec5d1-afa8-d1f7-831e-fae4ffa8d1d3, 'name': SearchDatastore_Task, 'duration_secs': 0.024627} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.699650] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.699900] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1477.700204] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.700299] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.700598] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1477.703837] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4285da2e-fd84-4ed5-9a4b-bd95773ab666 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.721526] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1477.721957] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1477.722313] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleting the datastore file [datastore1] 2a41be15-efaf-4e78-a278-2711cb11e98f {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1477.723365] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task 
with opID=oslo.vmware-9ea91c87-75f9-4807-b7ae-55ab5b3e8d7a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.737450] env[62619]: DEBUG oslo_vmware.api [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1477.737450] env[62619]: value = "task-1777442" [ 1477.737450] env[62619]: _type = "Task" [ 1477.737450] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.744660] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1477.744846] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1477.750809] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e18d0f35-77ce-445f-a46b-7c57f467597e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.754176] env[62619]: DEBUG oslo_vmware.api [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777442, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.755919] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1477.756189] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1477.756857] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Deleting the datastore file [datastore1] 94c9a119-5c04-4550-b55d-a4a2985385d3 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1477.757417] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f119006b-56d5-4ec3-8023-4ae1c3b62e81 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.763800] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Waiting for the task: (returnval){ [ 1477.763800] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5276a5e2-26ff-2c34-a76e-f62a45789bce" [ 1477.763800] env[62619]: _type = "Task" [ 1477.763800] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.772236] env[62619]: DEBUG oslo_vmware.api [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1477.772236] env[62619]: value = "task-1777443" [ 1477.772236] env[62619]: _type = "Task" [ 1477.772236] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.786435] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5276a5e2-26ff-2c34-a76e-f62a45789bce, 'name': SearchDatastore_Task, 'duration_secs': 0.011527} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.788579] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be380951-b35c-42f0-bb55-6aecd9ed9960 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.795992] env[62619]: DEBUG oslo_vmware.api [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777443, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.801445] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Waiting for the task: (returnval){ [ 1477.801445] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dbece9-6f39-4ff2-81b8-482551deaf38" [ 1477.801445] env[62619]: _type = "Task" [ 1477.801445] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.815731] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dbece9-6f39-4ff2-81b8-482551deaf38, 'name': SearchDatastore_Task, 'duration_secs': 0.011019} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.816140] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.816355] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 40eeb844-7423-4818-8095-81062c7e6392/40eeb844-7423-4818-8095-81062c7e6392.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1477.816749] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f524f3f-02d7-4b07-b778-2243e4ef9299 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.828261] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Waiting for the task: (returnval){ [ 1477.828261] env[62619]: value = "task-1777444" [ 1477.828261] env[62619]: _type = "Task" [ 1477.828261] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.841193] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777444, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.884445] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.930398] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777441, 'name': CreateVM_Task, 'duration_secs': 0.37309} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.930809] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1477.930973] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.931148] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.931561] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1477.931700] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19f4649e-5844-447f-ab98-e8edbf225845 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.936311] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Waiting for the task: (returnval){ [ 1477.936311] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ddd2b3-3c1a-e321-0d16-da79dedbcf21" [ 1477.936311] env[62619]: _type = "Task" [ 1477.936311] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.944679] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ddd2b3-3c1a-e321-0d16-da79dedbcf21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.963111] env[62619]: DEBUG oslo_vmware.api [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777435, 'name': PowerOnVM_Task, 'duration_secs': 0.875952} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.963363] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1477.963555] env[62619]: INFO nova.compute.manager [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Took 9.62 seconds to spawn the instance on the hypervisor. [ 1477.963725] env[62619]: DEBUG nova.compute.manager [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1477.964476] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cdb8d13-2c4e-4ccc-bc42-f96144e22ad3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.982935] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.713s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.982935] env[62619]: DEBUG nova.compute.manager [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1477.983643] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.291s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.983832] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.986820] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.944s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.987292] env[62619]: INFO nova.compute.claims [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1478.017242] env[62619]: INFO nova.scheduler.client.report [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Deleted allocations for instance 91ce0ab3-4fa4-4992-995a-0baeec91d9d0 [ 1478.039724] env[62619]: DEBUG nova.network.neutron [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Updated VIF entry in instance network info cache for port 13b7e2bb-07ca-4faa-aa62-69635847b2f7. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1478.039724] env[62619]: DEBUG nova.network.neutron [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Updating instance_info_cache with network_info: [{"id": "13b7e2bb-07ca-4faa-aa62-69635847b2f7", "address": "fa:16:3e:07:1c:ed", "network": {"id": "d28ddb7a-9db3-465f-8343-8f23b12b5183", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2033557069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "333f40a8350d4a4586cd2236bc63bef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13b7e2bb-07", "ovs_interfaceid": "13b7e2bb-07ca-4faa-aa62-69635847b2f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.168086] env[62619]: DEBUG oslo_vmware.api [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777438, 'name': RemoveSnapshot_Task, 'duration_secs': 0.729244} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.168418] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1478.168641] env[62619]: INFO nova.compute.manager [None req-9bc00bc5-7b2b-4b29-8b24-33228c4f04d8 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Took 14.34 seconds to snapshot the instance on the hypervisor. [ 1478.247228] env[62619]: DEBUG oslo_vmware.api [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777442, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179356} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.247383] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1478.247614] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1478.247710] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1478.247880] env[62619]: INFO nova.compute.manager [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1478.248134] env[62619]: DEBUG oslo.service.loopingcall [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.248326] env[62619]: DEBUG nova.compute.manager [-] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1478.248418] env[62619]: DEBUG nova.network.neutron [-] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1478.282609] env[62619]: DEBUG oslo_vmware.api [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777443, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179711} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.282930] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1478.283132] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1478.283309] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1478.283476] env[62619]: INFO nova.compute.manager [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Took 1.32 seconds to destroy the instance on the hypervisor. [ 1478.283713] env[62619]: DEBUG oslo.service.loopingcall [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.283899] env[62619]: DEBUG nova.compute.manager [-] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1478.283991] env[62619]: DEBUG nova.network.neutron [-] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1478.342479] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777444, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.448031] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ddd2b3-3c1a-e321-0d16-da79dedbcf21, 'name': SearchDatastore_Task, 'duration_secs': 0.013517} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.448350] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.448692] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1478.448851] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.448999] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.449602] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1478.449602] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba7c3cc7-0e63-4b96-afe8-65421c8b600e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.458552] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1478.458728] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1478.459471] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51e043ac-58d4-4550-8f67-cac4e0f02b8b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.465608] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Waiting for the task: (returnval){ [ 1478.465608] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52878810-d7a8-4c19-5d00-37a31f08c169" [ 1478.465608] env[62619]: _type = "Task" [ 1478.465608] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.482063] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52878810-d7a8-4c19-5d00-37a31f08c169, 'name': SearchDatastore_Task, 'duration_secs': 0.009978} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.483790] env[62619]: INFO nova.compute.manager [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Took 44.75 seconds to build instance. [ 1478.484593] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44ed48e4-390d-4af6-86fb-8e12a4d2f948 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.493753] env[62619]: DEBUG nova.compute.utils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1478.497540] env[62619]: DEBUG nova.compute.manager [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1478.497540] env[62619]: DEBUG nova.network.neutron [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1478.500028] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Waiting for the task: (returnval){ [ 1478.500028] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529e7721-04c3-b031-49a8-b264f5f30846" [ 1478.500028] env[62619]: _type = "Task" [ 1478.500028] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.514626] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529e7721-04c3-b031-49a8-b264f5f30846, 'name': SearchDatastore_Task, 'duration_secs': 0.011694} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.514869] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.515160] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 597c0f95-5798-4022-8e2e-89a700698d7a/597c0f95-5798-4022-8e2e-89a700698d7a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1478.515418] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c362309d-9ed2-43e3-8e55-169d022b7780 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.523274] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Waiting for the task: (returnval){ [ 1478.523274] env[62619]: value = "task-1777445" [ 1478.523274] env[62619]: _type = "Task" [ 1478.523274] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.530155] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb9a2a1c-47fe-44cf-9d61-64f2560034c4 tempest-ServerMetadataTestJSON-977013988 tempest-ServerMetadataTestJSON-977013988-project-member] Lock "91ce0ab3-4fa4-4992-995a-0baeec91d9d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.613s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.537760] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777445, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.542331] env[62619]: DEBUG oslo_concurrency.lockutils [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] Releasing lock "refresh_cache-9014ef05-64d1-4bd6-9f2e-db58003b6520" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.542696] env[62619]: DEBUG nova.compute.manager [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Received event network-vif-plugged-5c068735-1d99-4f86-a405-99a38588ba2b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1478.542982] env[62619]: DEBUG oslo_concurrency.lockutils [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] Acquiring lock "40eeb844-7423-4818-8095-81062c7e6392-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.543289] env[62619]: DEBUG oslo_concurrency.lockutils [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] Lock "40eeb844-7423-4818-8095-81062c7e6392-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.543389] env[62619]: DEBUG oslo_concurrency.lockutils [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] Lock "40eeb844-7423-4818-8095-81062c7e6392-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.543499] env[62619]: DEBUG nova.compute.manager [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] No waiting events found dispatching network-vif-plugged-5c068735-1d99-4f86-a405-99a38588ba2b {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1478.543712] env[62619]: WARNING nova.compute.manager [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Received unexpected event network-vif-plugged-5c068735-1d99-4f86-a405-99a38588ba2b for instance with vm_state building and task_state spawning. [ 1478.544084] env[62619]: DEBUG nova.compute.manager [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Received event network-changed-5c068735-1d99-4f86-a405-99a38588ba2b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1478.544386] env[62619]: DEBUG nova.compute.manager [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Refreshing instance network info cache due to event network-changed-5c068735-1d99-4f86-a405-99a38588ba2b. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1478.544860] env[62619]: DEBUG oslo_concurrency.lockutils [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] Acquiring lock "refresh_cache-40eeb844-7423-4818-8095-81062c7e6392" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.545160] env[62619]: DEBUG oslo_concurrency.lockutils [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] Acquired lock "refresh_cache-40eeb844-7423-4818-8095-81062c7e6392" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.545376] env[62619]: DEBUG nova.network.neutron [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Refreshing network info cache for port 5c068735-1d99-4f86-a405-99a38588ba2b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1478.562078] env[62619]: DEBUG nova.policy [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0cb2cfd0e64f4035846dbae1a2d3174b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4a61b4c4b2b42a1933ef647b146b530', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1478.746800] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.747200] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.840078] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777444, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.987588] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0e63a6a5-2f51-40cd-a78e-8074ff2c1d98 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "9014ef05-64d1-4bd6-9f2e-db58003b6520" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.105s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.005327] env[62619]: DEBUG nova.compute.manager [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1479.038231] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777445, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.071270] env[62619]: DEBUG nova.network.neutron [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Successfully created port: 1e21d673-0db1-49d8-b86e-c8b8568b7452 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1479.240176] env[62619]: DEBUG nova.compute.manager [req-95e6636a-00e1-44c4-a628-7f374b131691 req-7994e001-e5cb-4e1f-8c3d-5675372c696c service nova] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Received event network-vif-deleted-649859d4-a599-4566-8f67-2a6e320625c0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1479.240379] env[62619]: INFO nova.compute.manager [req-95e6636a-00e1-44c4-a628-7f374b131691 req-7994e001-e5cb-4e1f-8c3d-5675372c696c service nova] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Neutron deleted interface 649859d4-a599-4566-8f67-2a6e320625c0; detaching it from the instance and deleting it from the info cache [ 1479.240598] env[62619]: DEBUG nova.network.neutron [req-95e6636a-00e1-44c4-a628-7f374b131691 req-7994e001-e5cb-4e1f-8c3d-5675372c696c service nova] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.330238] env[62619]: DEBUG nova.network.neutron [-] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.350371] env[62619]: DEBUG nova.network.neutron [-] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.351981] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777444, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.490726] env[62619]: DEBUG nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1479.541049] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777445, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.960776} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.541306] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 597c0f95-5798-4022-8e2e-89a700698d7a/597c0f95-5798-4022-8e2e-89a700698d7a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1479.541527] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1479.541838] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-835d4819-36fb-4da3-b4a3-3582d1e15f0d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.552749] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Waiting for the task: (returnval){ [ 1479.552749] env[62619]: value = "task-1777446" [ 1479.552749] env[62619]: _type = "Task" [ 1479.552749] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.561648] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777446, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.568139] env[62619]: DEBUG nova.network.neutron [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Updated VIF entry in instance network info cache for port 5c068735-1d99-4f86-a405-99a38588ba2b. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1479.568139] env[62619]: DEBUG nova.network.neutron [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Updating instance_info_cache with network_info: [{"id": "5c068735-1d99-4f86-a405-99a38588ba2b", "address": "fa:16:3e:49:d7:a2", "network": {"id": "a2de4aa2-b0a7-4709-bd23-0f0a97e6683e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1358443003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8eb3cd57aa0c47798c99d55dbdf98126", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c068735-1d", "ovs_interfaceid": "5c068735-1d99-4f86-a405-99a38588ba2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.631210] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6ad4a2-fbde-46a9-96c1-0a838099db02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.643828] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd8a15e-f81d-414a-9173-466fbd1691b9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.698035] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b383a090-6029-4eb8-ac7f-2ac7df0a287c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.706490] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467fa869-d44e-4a65-9107-1295c4b66663 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.720508] env[62619]: DEBUG nova.compute.provider_tree [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1479.746514] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce3f8592-cebf-4c9d-852c-2d885720c27e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.757046] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4350a076-5a32-4734-abee-11ff601c60e8 {{(pid=62619) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.789993] env[62619]: DEBUG nova.compute.manager [req-95e6636a-00e1-44c4-a628-7f374b131691 req-7994e001-e5cb-4e1f-8c3d-5675372c696c service nova] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Detach interface failed, port_id=649859d4-a599-4566-8f67-2a6e320625c0, reason: Instance 94c9a119-5c04-4550-b55d-a4a2985385d3 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1479.789993] env[62619]: DEBUG nova.compute.manager [req-95e6636a-00e1-44c4-a628-7f374b131691 req-7994e001-e5cb-4e1f-8c3d-5675372c696c service nova] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Received event network-vif-deleted-6ffb9058-5f89-4766-b8d7-716e21a551f4 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1479.789993] env[62619]: INFO nova.compute.manager [req-95e6636a-00e1-44c4-a628-7f374b131691 req-7994e001-e5cb-4e1f-8c3d-5675372c696c service nova] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Neutron deleted interface 6ffb9058-5f89-4766-b8d7-716e21a551f4; detaching it from the instance and deleting it from the info cache [ 1479.790205] env[62619]: DEBUG nova.network.neutron [req-95e6636a-00e1-44c4-a628-7f374b131691 req-7994e001-e5cb-4e1f-8c3d-5675372c696c service nova] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.833992] env[62619]: INFO nova.compute.manager [-] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Took 1.55 seconds to deallocate network for instance. [ 1479.847824] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777444, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.668387} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.848174] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 40eeb844-7423-4818-8095-81062c7e6392/40eeb844-7423-4818-8095-81062c7e6392.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1479.848383] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1479.848626] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7524fb29-2330-4064-aead-828dcbf1fb67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.855990] env[62619]: INFO nova.compute.manager [-] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Took 1.61 seconds to deallocate network for instance. 
[ 1479.856339] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Waiting for the task: (returnval){ [ 1479.856339] env[62619]: value = "task-1777447" [ 1479.856339] env[62619]: _type = "Task" [ 1479.856339] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.871506] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777447, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.021156] env[62619]: DEBUG nova.compute.manager [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1480.022789] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.047216] env[62619]: DEBUG nova.virt.hardware [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1480.047463] env[62619]: DEBUG nova.virt.hardware [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1480.047673] env[62619]: DEBUG nova.virt.hardware [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1480.047782] env[62619]: DEBUG nova.virt.hardware [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Flavor pref 0:0:0 
{{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1480.047923] env[62619]: DEBUG nova.virt.hardware [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1480.050145] env[62619]: DEBUG nova.virt.hardware [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1480.050145] env[62619]: DEBUG nova.virt.hardware [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1480.050145] env[62619]: DEBUG nova.virt.hardware [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1480.050145] env[62619]: DEBUG nova.virt.hardware [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1480.050145] env[62619]: DEBUG nova.virt.hardware [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1480.050145] env[62619]: DEBUG nova.virt.hardware [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1480.051312] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7548e294-9ba5-4cdf-8ec3-c07ec705c916 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.068956] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db84adf5-f744-40c1-83a6-128f88c54ead {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.075976] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777446, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105002} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.076993] env[62619]: DEBUG oslo_concurrency.lockutils [req-2f4e31fa-99ce-4bc9-964d-7d9bd823f57b req-542802aa-d3d6-4c6a-bf27-aece8f55b913 service nova] Releasing lock "refresh_cache-40eeb844-7423-4818-8095-81062c7e6392" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.077631] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1480.079177] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4ddf4a-474b-4368-8220-1ff7967fe772 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.109434] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 597c0f95-5798-4022-8e2e-89a700698d7a/597c0f95-5798-4022-8e2e-89a700698d7a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1480.109900] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-136ac2f1-566d-4091-ad58-30142c2090c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.130652] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Waiting for the task: (returnval){ [ 1480.130652] env[62619]: value = "task-1777448" [ 1480.130652] env[62619]: _type = "Task" [ 1480.130652] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.139717] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777448, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.223674] env[62619]: DEBUG nova.scheduler.client.report [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1480.295346] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-348e03c6-2a9b-43f5-8883-13fc74c69efd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.311499] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1f96f2-69b8-49b6-90f9-e15d9c620b1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.350394] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.350895] env[62619]: DEBUG nova.compute.manager [req-95e6636a-00e1-44c4-a628-7f374b131691 req-7994e001-e5cb-4e1f-8c3d-5675372c696c service nova] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Detach interface failed, port_id=6ffb9058-5f89-4766-b8d7-716e21a551f4, reason: Instance 2a41be15-efaf-4e78-a278-2711cb11e98f could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1480.369568] env[62619]: DEBUG oslo_concurrency.lockutils [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.370052] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777447, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066396} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.370406] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1480.371463] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9880f78f-0540-468c-ae00-0c85a9fcc830 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.401753] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 40eeb844-7423-4818-8095-81062c7e6392/40eeb844-7423-4818-8095-81062c7e6392.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1480.402167] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91f7cfea-b2ca-4d2c-8e62-2f424a1e0612 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.427831] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Waiting for the task: (returnval){ [ 1480.427831] env[62619]: value = "task-1777449" [ 1480.427831] env[62619]: _type = "Task" [ 1480.427831] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.438384] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777449, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.512905] env[62619]: DEBUG nova.compute.manager [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1480.514505] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa61ae9c-5bec-401a-bfea-9a23031a988e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.640747] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777448, 'name': ReconfigVM_Task, 'duration_secs': 0.379938} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.641057] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 597c0f95-5798-4022-8e2e-89a700698d7a/597c0f95-5798-4022-8e2e-89a700698d7a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1480.641661] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-563bbbee-3c14-4585-adc7-dcc68703997b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.650104] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Waiting for the task: (returnval){ [ 1480.650104] env[62619]: value = "task-1777450" [ 1480.650104] env[62619]: _type = "Task" [ 1480.650104] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.672829] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777450, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.729913] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.744s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.730592] env[62619]: DEBUG nova.compute.manager [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1480.734363] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.474s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.734724] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.737498] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.931s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.738242] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.740816] env[62619]: DEBUG oslo_concurrency.lockutils [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.004s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.741112] env[62619]: DEBUG oslo_concurrency.lockutils [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.743162] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.618s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.745689] env[62619]: INFO nova.compute.claims [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1480.761659] env[62619]: DEBUG nova.network.neutron [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 
tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Successfully updated port: 1e21d673-0db1-49d8-b86e-c8b8568b7452 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1480.819096] env[62619]: INFO nova.scheduler.client.report [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Deleted allocations for instance 312aed5b-a66e-4428-ac1b-483dc2b38291 [ 1480.823938] env[62619]: INFO nova.scheduler.client.report [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Deleted allocations for instance e9ca5148-f188-4a15-83ae-8f3d730b0dab [ 1480.839717] env[62619]: INFO nova.scheduler.client.report [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Deleted allocations for instance e81c03f7-9c0e-46bd-9641-aced82038eca [ 1480.869235] env[62619]: DEBUG nova.compute.manager [req-462924b3-43f0-437b-8a83-7a795a57f516 req-a433925d-97ef-400c-bff3-ede023178f2e service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Received event network-vif-plugged-1e21d673-0db1-49d8-b86e-c8b8568b7452 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1480.869561] env[62619]: DEBUG oslo_concurrency.lockutils [req-462924b3-43f0-437b-8a83-7a795a57f516 req-a433925d-97ef-400c-bff3-ede023178f2e service nova] Acquiring lock "cef20063-96f0-46cc-9f7d-4436b60216c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.869628] env[62619]: DEBUG oslo_concurrency.lockutils [req-462924b3-43f0-437b-8a83-7a795a57f516 req-a433925d-97ef-400c-bff3-ede023178f2e service nova] Lock "cef20063-96f0-46cc-9f7d-4436b60216c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.869792] env[62619]: DEBUG oslo_concurrency.lockutils [req-462924b3-43f0-437b-8a83-7a795a57f516 req-a433925d-97ef-400c-bff3-ede023178f2e service nova] Lock "cef20063-96f0-46cc-9f7d-4436b60216c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.869952] env[62619]: DEBUG nova.compute.manager [req-462924b3-43f0-437b-8a83-7a795a57f516 req-a433925d-97ef-400c-bff3-ede023178f2e service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] No waiting events found dispatching network-vif-plugged-1e21d673-0db1-49d8-b86e-c8b8568b7452 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1480.870428] env[62619]: WARNING nova.compute.manager [req-462924b3-43f0-437b-8a83-7a795a57f516 req-a433925d-97ef-400c-bff3-ede023178f2e service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Received unexpected event network-vif-plugged-1e21d673-0db1-49d8-b86e-c8b8568b7452 for instance with vm_state building and task_state spawning. 
[ 1480.943788] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777449, 'name': ReconfigVM_Task, 'duration_secs': 0.327206} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.944108] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 40eeb844-7423-4818-8095-81062c7e6392/40eeb844-7423-4818-8095-81062c7e6392.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1480.944743] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-73ff0262-e9e3-4279-bec2-884e4edc7bc8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.954288] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Waiting for the task: (returnval){ [ 1480.954288] env[62619]: value = "task-1777451" [ 1480.954288] env[62619]: _type = "Task" [ 1480.954288] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.968275] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777451, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.028988] env[62619]: INFO nova.compute.manager [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] instance snapshotting [ 1481.032215] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49d11e4-ff08-4c47-b23c-b209a1844d39 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.056287] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880b24b2-b0e5-41cc-b0af-fba2069befdc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.161577] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777450, 'name': Rename_Task, 'duration_secs': 0.162352} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.162438] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1481.162438] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06310e06-c6ab-453f-bcd4-51f8d65393ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.170672] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Waiting for the task: (returnval){ [ 1481.170672] env[62619]: value = "task-1777452" [ 1481.170672] env[62619]: _type = "Task" [ 1481.170672] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.182745] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777452, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.253065] env[62619]: DEBUG nova.compute.utils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1481.256737] env[62619]: DEBUG nova.compute.manager [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1481.256909] env[62619]: DEBUG nova.network.neutron [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1481.264851] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "refresh_cache-cef20063-96f0-46cc-9f7d-4436b60216c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.265088] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquired lock "refresh_cache-cef20063-96f0-46cc-9f7d-4436b60216c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.265458] env[62619]: DEBUG nova.network.neutron [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1481.302353] env[62619]: DEBUG nova.policy [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f5836e62ab7440fa798f7bea287572e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed0964fc0c414168b3027730645f7ee8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1481.330850] env[62619]: DEBUG oslo_concurrency.lockutils [None req-79eb6552-fb3f-4888-810c-cc473d780a2f tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "312aed5b-a66e-4428-ac1b-483dc2b38291" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.273s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.333012] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190078a3-5e62-438d-ae8b-8b9879bc5c02 tempest-ListServersNegativeTestJSON-356677094 tempest-ListServersNegativeTestJSON-356677094-project-member] Lock "e9ca5148-f188-4a15-83ae-8f3d730b0dab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.347s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.349390] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db82350b-09f4-43c1-98a9-ec9c5f2daf6e tempest-ServersAdmin275Test-1560223435 tempest-ServersAdmin275Test-1560223435-project-member] Lock "e81c03f7-9c0e-46bd-9641-aced82038eca" 
"released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.348s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.385933] env[62619]: DEBUG nova.compute.manager [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Received event network-changed-13b7e2bb-07ca-4faa-aa62-69635847b2f7 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1481.386157] env[62619]: DEBUG nova.compute.manager [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Refreshing instance network info cache due to event network-changed-13b7e2bb-07ca-4faa-aa62-69635847b2f7. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1481.386372] env[62619]: DEBUG oslo_concurrency.lockutils [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] Acquiring lock "refresh_cache-9014ef05-64d1-4bd6-9f2e-db58003b6520" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.386513] env[62619]: DEBUG oslo_concurrency.lockutils [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] Acquired lock "refresh_cache-9014ef05-64d1-4bd6-9f2e-db58003b6520" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.386660] env[62619]: DEBUG nova.network.neutron [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Refreshing network info cache for port 13b7e2bb-07ca-4faa-aa62-69635847b2f7 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1481.464225] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777451, 'name': Rename_Task, 'duration_secs': 0.142721} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.464509] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1481.464756] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9fc411de-ec67-4e52-990e-2471e257e1fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.473843] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Waiting for the task: (returnval){ [ 1481.473843] env[62619]: value = "task-1777453" [ 1481.473843] env[62619]: _type = "Task" [ 1481.473843] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.483721] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777453, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.574560] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1481.574560] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3f3c65a2-c85a-4293-b989-c146f85ef685 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.583426] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1481.583426] env[62619]: value = "task-1777454" [ 1481.583426] env[62619]: _type = "Task" [ 1481.583426] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.588583] env[62619]: DEBUG nova.network.neutron [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Successfully created port: 4204271d-966d-4e43-9791-80b4443eccc9 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1481.596988] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777454, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.681439] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777452, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.759731] env[62619]: DEBUG nova.compute.manager [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1481.833937] env[62619]: DEBUG nova.network.neutron [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1481.993439] env[62619]: DEBUG oslo_vmware.api [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777453, 'name': PowerOnVM_Task, 'duration_secs': 0.494539} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.998396] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1482.000020] env[62619]: INFO nova.compute.manager [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Took 8.29 seconds to spawn the instance on the hypervisor. [ 1482.000020] env[62619]: DEBUG nova.compute.manager [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1482.005148] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a622af00-9876-408e-a6a0-37089021e75a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.094853] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777454, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.192897] env[62619]: DEBUG oslo_vmware.api [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777452, 'name': PowerOnVM_Task, 'duration_secs': 0.574545} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.192897] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1482.192897] env[62619]: INFO nova.compute.manager [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Took 4.89 seconds to spawn the instance on the hypervisor. 
[ 1482.192897] env[62619]: DEBUG nova.compute.manager [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1482.192897] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0cf0440-d26c-4d2f-9b93-3dc8e099ff9b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.246442] env[62619]: DEBUG nova.network.neutron [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Updating instance_info_cache with network_info: [{"id": "1e21d673-0db1-49d8-b86e-c8b8568b7452", "address": "fa:16:3e:04:a9:2c", "network": {"id": "f93b590d-39d9-42a4-8c3f-d05fa75fdf10", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1677764334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4a61b4c4b2b42a1933ef647b146b530", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e21d673-0d", "ovs_interfaceid": "1e21d673-0db1-49d8-b86e-c8b8568b7452", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.344211] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5abe7a4f-5617-47ad-b3b0-449f1a99c605 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.354039] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912cfbcb-da91-4179-9907-5969cb79c871 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.398257] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f01c35-9bce-4c64-b7b7-efe625b301ad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.407229] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e27497-65b4-4eb9-a5d1-4b7cbf84ddd7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.423078] env[62619]: DEBUG nova.compute.provider_tree [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed in ProviderTree for provider: 
e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1482.528245] env[62619]: INFO nova.compute.manager [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Took 42.80 seconds to build instance. [ 1482.596434] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777454, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.645872] env[62619]: DEBUG nova.network.neutron [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Updated VIF entry in instance network info cache for port 13b7e2bb-07ca-4faa-aa62-69635847b2f7. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1482.646269] env[62619]: DEBUG nova.network.neutron [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Updating instance_info_cache with network_info: [{"id": "13b7e2bb-07ca-4faa-aa62-69635847b2f7", "address": "fa:16:3e:07:1c:ed", "network": {"id": "d28ddb7a-9db3-465f-8343-8f23b12b5183", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2033557069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "333f40a8350d4a4586cd2236bc63bef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13b7e2bb-07", "ovs_interfaceid": "13b7e2bb-07ca-4faa-aa62-69635847b2f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.715217] env[62619]: INFO nova.compute.manager [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Took 40.75 seconds to build instance. 
[ 1482.749171] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Releasing lock "refresh_cache-cef20063-96f0-46cc-9f7d-4436b60216c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1482.749689] env[62619]: DEBUG nova.compute.manager [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Instance network_info: |[{"id": "1e21d673-0db1-49d8-b86e-c8b8568b7452", "address": "fa:16:3e:04:a9:2c", "network": {"id": "f93b590d-39d9-42a4-8c3f-d05fa75fdf10", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1677764334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4a61b4c4b2b42a1933ef647b146b530", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e21d673-0d", "ovs_interfaceid": "1e21d673-0db1-49d8-b86e-c8b8568b7452", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1482.750394] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:a9:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db068f71-08cc-42d4-8ab6-17134c1585e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e21d673-0db1-49d8-b86e-c8b8568b7452', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1482.761611] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Creating folder: Project (a4a61b4c4b2b42a1933ef647b146b530). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1482.762077] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb2a6191-9c96-43a8-a734-dbe1eed02ee7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.777118] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Created folder: Project (a4a61b4c4b2b42a1933ef647b146b530) in parent group-v368875. 
[ 1482.777340] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Creating folder: Instances. Parent ref: group-v368979. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1482.778490] env[62619]: DEBUG nova.compute.manager [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1482.780457] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e16bd001-1f08-4c30-b057-57d5f58cd0be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.792497] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Created folder: Instances in parent group-v368979. [ 1482.792764] env[62619]: DEBUG oslo.service.loopingcall [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1482.793348] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1482.793348] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c441b47-3b6e-4caa-9711-a11dd1694eb3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.817953] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1482.817953] env[62619]: value = "task-1777457" [ 1482.817953] env[62619]: _type = "Task" [ 1482.817953] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.820205] env[62619]: DEBUG nova.virt.hardware [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1482.820435] env[62619]: DEBUG nova.virt.hardware [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1482.820588] env[62619]: DEBUG nova.virt.hardware [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1482.820764] env[62619]: DEBUG nova.virt.hardware [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1482.820904] env[62619]: DEBUG nova.virt.hardware [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1482.821054] env[62619]: DEBUG nova.virt.hardware [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1482.821258] env[62619]: DEBUG nova.virt.hardware [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1482.821408] env[62619]: DEBUG nova.virt.hardware [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1482.821567] env[62619]: DEBUG nova.virt.hardware [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1482.821721] env[62619]: DEBUG nova.virt.hardware [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1482.821891] env[62619]: DEBUG nova.virt.hardware [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1482.822787] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12b180a-85b3-4a71-9b74-9c42474423b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.836467] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1042ce8-c9f2-44c8-8eaa-887dfd70f318 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.840673] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777457, 'name': CreateVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.929644] env[62619]: DEBUG nova.scheduler.client.report [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1483.030795] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9530aea-74d5-4ecc-aa89-2c9a611ec23c tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Lock "40eeb844-7423-4818-8095-81062c7e6392" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 83.610s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.098467] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777454, 'name': CreateSnapshot_Task, 'duration_secs': 1.188535} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.100114] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1483.101324] env[62619]: DEBUG nova.compute.manager [req-1cc6858b-307f-426b-9d4a-8a6a7c2688de req-31cd5f01-7239-4a47-8d49-82dd15cf24c1 service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Received event network-changed-1e21d673-0db1-49d8-b86e-c8b8568b7452 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1483.101839] env[62619]: DEBUG nova.compute.manager [req-1cc6858b-307f-426b-9d4a-8a6a7c2688de req-31cd5f01-7239-4a47-8d49-82dd15cf24c1 service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Refreshing instance network info cache due to event network-changed-1e21d673-0db1-49d8-b86e-c8b8568b7452. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1483.102087] env[62619]: DEBUG oslo_concurrency.lockutils [req-1cc6858b-307f-426b-9d4a-8a6a7c2688de req-31cd5f01-7239-4a47-8d49-82dd15cf24c1 service nova] Acquiring lock "refresh_cache-cef20063-96f0-46cc-9f7d-4436b60216c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.102237] env[62619]: DEBUG oslo_concurrency.lockutils [req-1cc6858b-307f-426b-9d4a-8a6a7c2688de req-31cd5f01-7239-4a47-8d49-82dd15cf24c1 service nova] Acquired lock "refresh_cache-cef20063-96f0-46cc-9f7d-4436b60216c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.102715] env[62619]: DEBUG nova.network.neutron [req-1cc6858b-307f-426b-9d4a-8a6a7c2688de req-31cd5f01-7239-4a47-8d49-82dd15cf24c1 service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Refreshing network info cache for port 1e21d673-0db1-49d8-b86e-c8b8568b7452 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1483.104530] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0aa69e-e66b-4bb0-9ee0-c1008be4c9c4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.149623] env[62619]: DEBUG oslo_concurrency.lockutils [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] Releasing lock "refresh_cache-9014ef05-64d1-4bd6-9f2e-db58003b6520" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1483.150532] env[62619]: DEBUG nova.compute.manager [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Received event network-changed-13b7e2bb-07ca-4faa-aa62-69635847b2f7 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1483.151062] env[62619]: DEBUG nova.compute.manager [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Refreshing instance network info cache due to event network-changed-13b7e2bb-07ca-4faa-aa62-69635847b2f7. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1483.153017] env[62619]: DEBUG oslo_concurrency.lockutils [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] Acquiring lock "refresh_cache-9014ef05-64d1-4bd6-9f2e-db58003b6520" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.153017] env[62619]: DEBUG oslo_concurrency.lockutils [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] Acquired lock "refresh_cache-9014ef05-64d1-4bd6-9f2e-db58003b6520" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.153017] env[62619]: DEBUG nova.network.neutron [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Refreshing network info cache for port 13b7e2bb-07ca-4faa-aa62-69635847b2f7 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1483.219446] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4324f21-b0cc-4971-b9a7-2d53d0d8bcbf tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Lock "597c0f95-5798-4022-8e2e-89a700698d7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 78.119s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.336093] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777457, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.393865] env[62619]: DEBUG nova.network.neutron [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Successfully updated port: 4204271d-966d-4e43-9791-80b4443eccc9 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1483.435820] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.693s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.436373] env[62619]: DEBUG nova.compute.manager [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1483.439074] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.112s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.440905] env[62619]: INFO nova.compute.claims [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1483.537554] env[62619]: DEBUG nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1483.634468] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1483.635802] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9c9865fd-bfae-46b8-9dfb-d177d3452830 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.668365] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1483.668365] env[62619]: value = "task-1777458" [ 1483.668365] env[62619]: _type = "Task" [ 1483.668365] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.678246] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777458, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.722957] env[62619]: DEBUG nova.compute.manager [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1483.840141] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777457, 'name': CreateVM_Task, 'duration_secs': 0.766564} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.842208] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1483.843689] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.847017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.847017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1483.847017] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e4527fe-ef8f-4449-bdb7-68a2c56f9cc8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.853177] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1483.853177] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dda4e7-17a0-f649-f581-76898859b7e1" [ 1483.853177] env[62619]: _type = "Task" [ 1483.853177] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.861460] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dda4e7-17a0-f649-f581-76898859b7e1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.898222] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "refresh_cache-060427a2-e724-4c51-879e-675154ae5df2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.898222] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquired lock "refresh_cache-060427a2-e724-4c51-879e-675154ae5df2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.898222] env[62619]: DEBUG nova.network.neutron [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1483.947020] env[62619]: DEBUG nova.compute.utils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1483.950753] env[62619]: DEBUG nova.compute.manager [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1483.956057] env[62619]: DEBUG nova.network.neutron [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1484.022021] env[62619]: DEBUG nova.policy [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe6ee1c8d8ef4b718274da7be4f5fd01', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c82fb42e93ff479b971f49eb92f50832', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1484.031237] env[62619]: DEBUG nova.network.neutron [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Updated VIF entry in instance network info cache for port 13b7e2bb-07ca-4faa-aa62-69635847b2f7. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1484.031237] env[62619]: DEBUG nova.network.neutron [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Updating instance_info_cache with network_info: [{"id": "13b7e2bb-07ca-4faa-aa62-69635847b2f7", "address": "fa:16:3e:07:1c:ed", "network": {"id": "d28ddb7a-9db3-465f-8343-8f23b12b5183", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2033557069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "333f40a8350d4a4586cd2236bc63bef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13b7e2bb-07", "ovs_interfaceid": "13b7e2bb-07ca-4faa-aa62-69635847b2f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1484.072042] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1484.186169] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777458, 'name': CloneVM_Task} progress is 93%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.245886] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1484.317815] env[62619]: DEBUG nova.network.neutron [req-1cc6858b-307f-426b-9d4a-8a6a7c2688de req-31cd5f01-7239-4a47-8d49-82dd15cf24c1 service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Updated VIF entry in instance network info cache for port 1e21d673-0db1-49d8-b86e-c8b8568b7452. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1484.318250] env[62619]: DEBUG nova.network.neutron [req-1cc6858b-307f-426b-9d4a-8a6a7c2688de req-31cd5f01-7239-4a47-8d49-82dd15cf24c1 service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Updating instance_info_cache with network_info: [{"id": "1e21d673-0db1-49d8-b86e-c8b8568b7452", "address": "fa:16:3e:04:a9:2c", "network": {"id": "f93b590d-39d9-42a4-8c3f-d05fa75fdf10", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1677764334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4a61b4c4b2b42a1933ef647b146b530", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e21d673-0d", "ovs_interfaceid": "1e21d673-0db1-49d8-b86e-c8b8568b7452", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1484.368616] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dda4e7-17a0-f649-f581-76898859b7e1, 'name': SearchDatastore_Task, 'duration_secs': 0.022344} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.368826] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.369577] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1484.370132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.370132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.370132] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1484.371573] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ce07d7c-d66e-4b40-ab80-c3b8a8cddbe7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.383684] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1484.383684] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1484.385394] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dd3be0a-7d16-471a-b212-e3fffdb585c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.394752] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1484.394752] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528b4b52-6fec-3d4d-918f-bf885aac87e4" [ 1484.394752] env[62619]: _type = "Task" [ 1484.394752] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.408777] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528b4b52-6fec-3d4d-918f-bf885aac87e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.455038] env[62619]: DEBUG nova.compute.manager [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1484.464482] env[62619]: DEBUG nova.network.neutron [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1484.518355] env[62619]: DEBUG nova.compute.manager [None req-aaacbb2e-c10c-4a98-95c8-b0429f1c4366 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1484.523022] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e53145-8aaf-4821-ad96-4417fde9d4c4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.532106] env[62619]: DEBUG oslo_concurrency.lockutils [req-a0b3808f-8625-4298-ba12-c477532f3e8d req-56528305-096d-43e7-bdd9-6478d134f102 service nova] Releasing lock "refresh_cache-9014ef05-64d1-4bd6-9f2e-db58003b6520" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.554020] env[62619]: DEBUG nova.network.neutron [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Successfully created port: 7df13a55-7d53-41b9-9489-591516bda30c {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1484.677422] env[62619]: DEBUG nova.network.neutron [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Updating instance_info_cache with network_info: [{"id": "4204271d-966d-4e43-9791-80b4443eccc9", "address": "fa:16:3e:b6:34:89", "network": {"id": "15f89bb2-20f5-4ac8-8688-bfeed19bc7c0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1093042133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed0964fc0c414168b3027730645f7ee8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4204271d-96", "ovs_interfaceid": "4204271d-966d-4e43-9791-80b4443eccc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1484.685373] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777458, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.826556] env[62619]: DEBUG oslo_concurrency.lockutils [req-1cc6858b-307f-426b-9d4a-8a6a7c2688de req-31cd5f01-7239-4a47-8d49-82dd15cf24c1 service nova] Releasing lock "refresh_cache-cef20063-96f0-46cc-9f7d-4436b60216c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.846618] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Acquiring lock "597c0f95-5798-4022-8e2e-89a700698d7a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1484.846686] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Lock "597c0f95-5798-4022-8e2e-89a700698d7a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.847211] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Acquiring lock "597c0f95-5798-4022-8e2e-89a700698d7a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1484.847690] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Lock "597c0f95-5798-4022-8e2e-89a700698d7a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.847871] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Lock "597c0f95-5798-4022-8e2e-89a700698d7a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.852820] env[62619]: INFO nova.compute.manager [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Terminating instance [ 1484.907765] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528b4b52-6fec-3d4d-918f-bf885aac87e4, 'name': SearchDatastore_Task, 'duration_secs': 0.015494} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.908777] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fa66f41-0d60-46d1-b6ba-207a6541f459 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.917468] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1484.917468] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fbfcc4-e58d-920b-064f-a3115777e774" [ 1484.917468] env[62619]: _type = "Task" [ 1484.917468] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.926532] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fbfcc4-e58d-920b-064f-a3115777e774, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.007117] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6dc6d6-0b7e-4c8e-a37f-7aa3235fa487 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.018709] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e1b13f-c7a6-4e0f-8ea3-e69ae4c9677c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.051945] env[62619]: INFO nova.compute.manager [None req-aaacbb2e-c10c-4a98-95c8-b0429f1c4366 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] instance snapshotting [ 1485.052555] env[62619]: DEBUG nova.objects.instance [None req-aaacbb2e-c10c-4a98-95c8-b0429f1c4366 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Lazy-loading 'flavor' on Instance uuid 597c0f95-5798-4022-8e2e-89a700698d7a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1485.055071] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa07f75-293e-4afc-91cb-317b41335205 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.063532] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ddba9f-ae12-4e27-8a69-2a274a543fbf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.080401] env[62619]: DEBUG nova.compute.provider_tree [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1485.184333] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e 
tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Releasing lock "refresh_cache-060427a2-e724-4c51-879e-675154ae5df2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.185229] env[62619]: DEBUG nova.compute.manager [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Instance network_info: |[{"id": "4204271d-966d-4e43-9791-80b4443eccc9", "address": "fa:16:3e:b6:34:89", "network": {"id": "15f89bb2-20f5-4ac8-8688-bfeed19bc7c0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1093042133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed0964fc0c414168b3027730645f7ee8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4204271d-96", "ovs_interfaceid": "4204271d-966d-4e43-9791-80b4443eccc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1485.185229] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777458, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.185399] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:34:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4204271d-966d-4e43-9791-80b4443eccc9', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1485.193487] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Creating folder: Project (ed0964fc0c414168b3027730645f7ee8). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1485.193760] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f70398e-207f-42bd-81e4-547d7b628fc6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.203270] env[62619]: DEBUG nova.compute.manager [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Received event network-vif-plugged-4204271d-966d-4e43-9791-80b4443eccc9 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1485.203498] env[62619]: DEBUG oslo_concurrency.lockutils [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] Acquiring lock "060427a2-e724-4c51-879e-675154ae5df2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1485.203679] env[62619]: DEBUG oslo_concurrency.lockutils [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] Lock "060427a2-e724-4c51-879e-675154ae5df2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1485.203904] env[62619]: DEBUG oslo_concurrency.lockutils [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] Lock "060427a2-e724-4c51-879e-675154ae5df2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.204131] env[62619]: DEBUG nova.compute.manager [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] [instance: 060427a2-e724-4c51-879e-675154ae5df2] No waiting events found dispatching network-vif-plugged-4204271d-966d-4e43-9791-80b4443eccc9 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1485.204309] env[62619]: WARNING nova.compute.manager [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Received unexpected event network-vif-plugged-4204271d-966d-4e43-9791-80b4443eccc9 for instance with vm_state building and task_state spawning. [ 1485.204470] env[62619]: DEBUG nova.compute.manager [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Received event network-changed-4204271d-966d-4e43-9791-80b4443eccc9 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1485.204622] env[62619]: DEBUG nova.compute.manager [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Refreshing instance network info cache due to event network-changed-4204271d-966d-4e43-9791-80b4443eccc9. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1485.204806] env[62619]: DEBUG oslo_concurrency.lockutils [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] Acquiring lock "refresh_cache-060427a2-e724-4c51-879e-675154ae5df2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.204939] env[62619]: DEBUG oslo_concurrency.lockutils [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] Acquired lock "refresh_cache-060427a2-e724-4c51-879e-675154ae5df2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.205137] env[62619]: DEBUG nova.network.neutron [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Refreshing network info cache for port 4204271d-966d-4e43-9791-80b4443eccc9 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1485.208273] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Created folder: Project (ed0964fc0c414168b3027730645f7ee8) in parent group-v368875. [ 1485.208452] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Creating folder: Instances. Parent ref: group-v368983. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1485.208887] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0793aac5-9b77-49ae-b03d-617d74e0fb49 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.221382] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Created folder: Instances in parent group-v368983. [ 1485.221490] env[62619]: DEBUG oslo.service.loopingcall [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1485.221639] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1485.221848] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-432efe55-3324-4331-8770-4569f60dfb8e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.244771] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1485.244771] env[62619]: value = "task-1777461" [ 1485.244771] env[62619]: _type = "Task" [ 1485.244771] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.254699] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777461, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.357494] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Acquiring lock "refresh_cache-597c0f95-5798-4022-8e2e-89a700698d7a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.357678] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Acquired lock "refresh_cache-597c0f95-5798-4022-8e2e-89a700698d7a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.357866] env[62619]: DEBUG nova.network.neutron [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1485.431079] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fbfcc4-e58d-920b-064f-a3115777e774, 'name': SearchDatastore_Task, 'duration_secs': 0.012796} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.431079] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.431079] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] cef20063-96f0-46cc-9f7d-4436b60216c6/cef20063-96f0-46cc-9f7d-4436b60216c6.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1485.431853] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4bdbac6-4848-48cb-987d-87184a250542 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.439687] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1485.439687] env[62619]: value = "task-1777462" [ 1485.439687] env[62619]: _type = "Task" [ 1485.439687] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.453113] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777462, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.475964] env[62619]: DEBUG nova.compute.manager [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1485.503639] env[62619]: DEBUG nova.virt.hardware [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1485.503826] env[62619]: DEBUG nova.virt.hardware [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1485.504018] env[62619]: DEBUG nova.virt.hardware [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1485.504223] env[62619]: DEBUG nova.virt.hardware [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1485.504394] env[62619]: DEBUG nova.virt.hardware [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1485.504637] env[62619]: DEBUG nova.virt.hardware [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1485.504987] 
env[62619]: DEBUG nova.virt.hardware [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1485.505153] env[62619]: DEBUG nova.virt.hardware [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1485.505329] env[62619]: DEBUG nova.virt.hardware [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1485.505492] env[62619]: DEBUG nova.virt.hardware [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1485.505662] env[62619]: DEBUG nova.virt.hardware [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1485.506552] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c53625-0047-477f-9b89-c324e53834e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.516508] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb54f1b-33e7-45da-9fe8-4ea1934abcf4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.558919] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4644e884-4a38-4f5a-be7a-d62481978bc2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.580036] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f17290-03ef-4acf-821d-7f0c72413e11 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.585022] env[62619]: DEBUG nova.scheduler.client.report [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1485.687956] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777458, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.759660] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777461, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.882230] env[62619]: DEBUG nova.network.neutron [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1485.958404] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777462, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.976415] env[62619]: DEBUG nova.network.neutron [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.091287] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.650s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.091287] env[62619]: DEBUG nova.compute.manager [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1486.095302] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.144s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.097433] env[62619]: INFO nova.compute.claims [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1486.103509] env[62619]: DEBUG nova.compute.manager [None req-aaacbb2e-c10c-4a98-95c8-b0429f1c4366 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Instance disappeared during snapshot {{(pid=62619) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1486.151576] env[62619]: DEBUG nova.network.neutron [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Updated VIF entry in instance network info cache for port 4204271d-966d-4e43-9791-80b4443eccc9. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1486.151979] env[62619]: DEBUG nova.network.neutron [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Updating instance_info_cache with network_info: [{"id": "4204271d-966d-4e43-9791-80b4443eccc9", "address": "fa:16:3e:b6:34:89", "network": {"id": "15f89bb2-20f5-4ac8-8688-bfeed19bc7c0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1093042133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed0964fc0c414168b3027730645f7ee8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4204271d-96", "ovs_interfaceid": "4204271d-966d-4e43-9791-80b4443eccc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.187317] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777458, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.260761] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777461, 'name': CreateVM_Task, 'duration_secs': 0.5276} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.262165] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1486.268141] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.268141] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.268141] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1486.268141] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faa67cbb-0005-41d2-94f8-e8514afe2863 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.271470] env[62619]: DEBUG nova.compute.manager [None req-aaacbb2e-c10c-4a98-95c8-b0429f1c4366 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Found 0 images (rotation: 2) {{(pid=62619) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4924}} [ 1486.275556] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1486.275556] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5282422c-1c7f-38a1-4af9-17fd0f090c77" [ 1486.275556] env[62619]: _type = "Task" [ 1486.275556] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.288976] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5282422c-1c7f-38a1-4af9-17fd0f090c77, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.412067] env[62619]: DEBUG nova.compute.manager [req-8992277c-1220-4b88-8ba5-c9141cb69160 req-714a7ab8-dece-4438-83b2-61fae9630061 service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Received event network-vif-plugged-7df13a55-7d53-41b9-9489-591516bda30c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1486.412067] env[62619]: DEBUG oslo_concurrency.lockutils [req-8992277c-1220-4b88-8ba5-c9141cb69160 req-714a7ab8-dece-4438-83b2-61fae9630061 service nova] Acquiring lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.412252] env[62619]: DEBUG oslo_concurrency.lockutils [req-8992277c-1220-4b88-8ba5-c9141cb69160 req-714a7ab8-dece-4438-83b2-61fae9630061 service nova] Lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.412431] env[62619]: DEBUG oslo_concurrency.lockutils [req-8992277c-1220-4b88-8ba5-c9141cb69160 req-714a7ab8-dece-4438-83b2-61fae9630061 service nova] Lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.412552] env[62619]: DEBUG nova.compute.manager [req-8992277c-1220-4b88-8ba5-c9141cb69160 req-714a7ab8-dece-4438-83b2-61fae9630061 service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] No waiting events found dispatching network-vif-plugged-7df13a55-7d53-41b9-9489-591516bda30c {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1486.412771] env[62619]: WARNING nova.compute.manager [req-8992277c-1220-4b88-8ba5-c9141cb69160 req-714a7ab8-dece-4438-83b2-61fae9630061 service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Received unexpected event network-vif-plugged-7df13a55-7d53-41b9-9489-591516bda30c for instance with vm_state building and task_state spawning. [ 1486.450845] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777462, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577056} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.451153] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] cef20063-96f0-46cc-9f7d-4436b60216c6/cef20063-96f0-46cc-9f7d-4436b60216c6.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1486.451374] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1486.451677] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f75b8ee-5bc6-4cd9-81dc-6e77bfac470a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.459496] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1486.459496] env[62619]: value = "task-1777463" [ 1486.459496] env[62619]: _type = "Task" [ 1486.459496] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.469202] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777463, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.482167] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Releasing lock "refresh_cache-597c0f95-5798-4022-8e2e-89a700698d7a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.482592] env[62619]: DEBUG nova.compute.manager [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1486.482807] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1486.483739] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5057644a-6d4b-4e3c-964d-3e975dd7544e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.491731] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1486.494325] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3510312c-986f-41b0-b069-801cb821d3eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.498979] env[62619]: DEBUG oslo_vmware.api [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Waiting for the task: (returnval){ [ 1486.498979] env[62619]: value = "task-1777464" [ 1486.498979] env[62619]: _type = "Task" [ 1486.498979] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.518429] env[62619]: DEBUG oslo_vmware.api [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777464, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.539491] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "0272ca2a-e9ff-4af5-8120-278a82d74627" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.539787] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "0272ca2a-e9ff-4af5-8120-278a82d74627" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.556665] env[62619]: DEBUG nova.network.neutron [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Successfully updated port: 7df13a55-7d53-41b9-9489-591516bda30c {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1486.610814] env[62619]: DEBUG nova.compute.utils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1486.613029] env[62619]: DEBUG nova.compute.manager [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1486.613252] env[62619]: DEBUG nova.network.neutron [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1486.654896] env[62619]: DEBUG oslo_concurrency.lockutils [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] Releasing lock "refresh_cache-060427a2-e724-4c51-879e-675154ae5df2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.655201] env[62619]: DEBUG nova.compute.manager [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Received event network-changed-5c068735-1d99-4f86-a405-99a38588ba2b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1486.655376] env[62619]: DEBUG nova.compute.manager [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Refreshing instance network info cache due to event network-changed-5c068735-1d99-4f86-a405-99a38588ba2b. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1486.655595] env[62619]: DEBUG oslo_concurrency.lockutils [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] Acquiring lock "refresh_cache-40eeb844-7423-4818-8095-81062c7e6392" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.655734] env[62619]: DEBUG oslo_concurrency.lockutils [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] Acquired lock "refresh_cache-40eeb844-7423-4818-8095-81062c7e6392" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.655891] env[62619]: DEBUG nova.network.neutron [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Refreshing network info cache for port 5c068735-1d99-4f86-a405-99a38588ba2b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1486.665117] env[62619]: DEBUG nova.policy [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e9094d6b3854c1184307d9bc35a966e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e11e1bca0c747fd8b4a0ca3e220ba4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1486.686445] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777458, 'name': CloneVM_Task, 'duration_secs': 2.569075} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.686714] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Created linked-clone VM from snapshot [ 1486.687483] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6709d809-ae89-4d2d-b293-f688fe6e68ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.696324] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Uploading image 71660a86-0f4d-4eef-8e06-761c0311eb57 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1486.724108] env[62619]: DEBUG oslo_vmware.rw_handles [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1486.724108] env[62619]: value = "vm-368982" [ 1486.724108] env[62619]: _type = "VirtualMachine" [ 1486.724108] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1486.724586] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2ab979f4-c31d-4c82-b6a9-769afd1a01de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.733125] env[62619]: DEBUG oslo_vmware.rw_handles [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lease: (returnval){ [ 1486.733125] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52806412-1f76-78ad-c3d2-67e677d8af4e" [ 1486.733125] env[62619]: _type = "HttpNfcLease" [ 1486.733125] env[62619]: } obtained for exporting VM: (result){ [ 1486.733125] env[62619]: value = "vm-368982" [ 1486.733125] env[62619]: _type = "VirtualMachine" [ 1486.733125] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1486.733579] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the lease: (returnval){ [ 1486.733579] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52806412-1f76-78ad-c3d2-67e677d8af4e" [ 1486.733579] env[62619]: _type = "HttpNfcLease" [ 1486.733579] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1486.742294] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1486.742294] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52806412-1f76-78ad-c3d2-67e677d8af4e" [ 1486.742294] env[62619]: _type = "HttpNfcLease" [ 1486.742294] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1486.786455] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5282422c-1c7f-38a1-4af9-17fd0f090c77, 'name': SearchDatastore_Task, 'duration_secs': 0.017375} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.786790] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.787062] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1486.787344] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.787520] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.787724] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1486.788029] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-767d734b-a362-4b68-8248-d3a5ad4f368a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.797412] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1486.797775] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 
tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1486.798666] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc81f901-db91-41c2-ade7-2ab23bf7db33 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.804525] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1486.804525] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529419ae-8839-edf6-8a60-8164af097c75" [ 1486.804525] env[62619]: _type = "Task" [ 1486.804525] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.816087] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529419ae-8839-edf6-8a60-8164af097c75, 'name': SearchDatastore_Task} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.817354] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a8ab013-2c4f-4e66-8117-e76d70199c80 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.823213] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1486.823213] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52352aac-21a5-5f22-168b-38d8b4bda222" [ 1486.823213] env[62619]: _type = "Task" [ 1486.823213] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.832455] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52352aac-21a5-5f22-168b-38d8b4bda222, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.969999] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777463, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080974} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.970599] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1486.971530] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95df149e-0c9b-4b61-8cf0-b3267eb77027 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.007153] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] cef20063-96f0-46cc-9f7d-4436b60216c6/cef20063-96f0-46cc-9f7d-4436b60216c6.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1487.007153] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6126ab3-94ff-415c-89f0-b13287afc3fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.027521] env[62619]: DEBUG nova.network.neutron [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Successfully created port: 3aadd3d9-5b8f-4d90-94a3-d818bbac3830 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1487.033499] env[62619]: DEBUG oslo_vmware.api [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777464, 'name': PowerOffVM_Task, 'duration_secs': 0.147098} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.035671] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1487.035671] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1487.035671] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1487.035671] env[62619]: value = "task-1777466" [ 1487.035671] env[62619]: _type = "Task" [ 1487.035671] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.035913] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-faf114c1-be8d-401f-8548-56633124e60e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.051086] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777466, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.061996] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.062185] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.062346] env[62619]: DEBUG nova.network.neutron [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1487.075578] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1487.075798] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1487.075974] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Deleting the datastore file [datastore1] 597c0f95-5798-4022-8e2e-89a700698d7a {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1487.076568] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-650c2646-e563-4c28-b263-c584c7192c24 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.084713] env[62619]: DEBUG oslo_vmware.api [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Waiting for the task: (returnval){ [ 1487.084713] env[62619]: value = "task-1777468" [ 1487.084713] env[62619]: 
_type = "Task" [ 1487.084713] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.094884] env[62619]: DEBUG oslo_vmware.api [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777468, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.117054] env[62619]: DEBUG nova.compute.manager [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1487.244679] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1487.244679] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52806412-1f76-78ad-c3d2-67e677d8af4e" [ 1487.244679] env[62619]: _type = "HttpNfcLease" [ 1487.244679] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1487.245036] env[62619]: DEBUG oslo_vmware.rw_handles [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1487.245036] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52806412-1f76-78ad-c3d2-67e677d8af4e" [ 1487.245036] env[62619]: _type = "HttpNfcLease" [ 1487.245036] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1487.245795] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b17732-1152-4a1a-b217-1fc6a29d1422 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.258248] env[62619]: DEBUG oslo_vmware.rw_handles [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529e31c6-4dc0-69ae-417d-f30ad4f01b10/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1487.258461] env[62619]: DEBUG oslo_vmware.rw_handles [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529e31c6-4dc0-69ae-417d-f30ad4f01b10/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1487.339535] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52352aac-21a5-5f22-168b-38d8b4bda222, 'name': SearchDatastore_Task, 'duration_secs': 0.009329} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.343543] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.343543] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 060427a2-e724-4c51-879e-675154ae5df2/060427a2-e724-4c51-879e-675154ae5df2.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1487.343543] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7538dda4-2e3c-4115-a45f-2ed3edfaca36 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.351312] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1487.351312] env[62619]: value = "task-1777469" [ 1487.351312] env[62619]: _type = "Task" [ 1487.351312] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.360642] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777469, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.441245] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-50e3d986-a8e9-4045-9e98-1c714f4a8062 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.546512] env[62619]: DEBUG nova.network.neutron [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Updated VIF entry in instance network info cache for port 5c068735-1d99-4f86-a405-99a38588ba2b. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1487.546645] env[62619]: DEBUG nova.network.neutron [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Updating instance_info_cache with network_info: [{"id": "5c068735-1d99-4f86-a405-99a38588ba2b", "address": "fa:16:3e:49:d7:a2", "network": {"id": "a2de4aa2-b0a7-4709-bd23-0f0a97e6683e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1358443003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8eb3cd57aa0c47798c99d55dbdf98126", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c068735-1d", "ovs_interfaceid": "5c068735-1d99-4f86-a405-99a38588ba2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1487.556638] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777466, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.604115] env[62619]: DEBUG oslo_vmware.api [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Task: {'id': task-1777468, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170338} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.604438] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1487.604743] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1487.605042] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1487.605702] env[62619]: INFO nova.compute.manager [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1487.605985] env[62619]: DEBUG oslo.service.loopingcall [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1487.606233] env[62619]: DEBUG nova.compute.manager [-] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1487.606342] env[62619]: DEBUG nova.network.neutron [-] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1487.611271] env[62619]: DEBUG nova.network.neutron [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1487.641448] env[62619]: DEBUG nova.network.neutron [-] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1487.792142] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3426017d-c609-4a9a-a79b-4cc151552a9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.804168] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879800ab-81b1-456a-b03c-25bc63aeedb4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.842080] env[62619]: DEBUG nova.network.neutron [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance_info_cache with network_info: [{"id": "7df13a55-7d53-41b9-9489-591516bda30c", "address": "fa:16:3e:aa:15:ba", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df13a55-7d", "ovs_interfaceid": "7df13a55-7d53-41b9-9489-591516bda30c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1487.846226] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f47a00d-cb56-46bd-8690-d69b6dc5dd46 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.860594] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab82d478-7d6b-4335-80df-1a4d4317d38d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.868989] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777469, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.881086] env[62619]: DEBUG nova.compute.provider_tree [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1488.049299] env[62619]: DEBUG oslo_concurrency.lockutils [req-edec153f-705b-4b33-9e7a-5e623ccc4299 req-20c7bbf9-5c8d-4c45-878b-5acd1898fbc6 service nova] Releasing lock "refresh_cache-40eeb844-7423-4818-8095-81062c7e6392" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.049833] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777466, 'name': ReconfigVM_Task, 'duration_secs': 0.880853} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.050194] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Reconfigured VM instance instance-00000025 to attach disk [datastore1] cef20063-96f0-46cc-9f7d-4436b60216c6/cef20063-96f0-46cc-9f7d-4436b60216c6.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1488.050886] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d36daf0-c3b1-467e-8672-87946218af9f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.060141] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1488.060141] env[62619]: value = "task-1777470" [ 1488.060141] env[62619]: _type = "Task" [ 1488.060141] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.070890] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777470, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.128416] env[62619]: DEBUG nova.compute.manager [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1488.143831] env[62619]: DEBUG nova.network.neutron [-] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1488.165215] env[62619]: DEBUG nova.virt.hardware [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1488.165540] env[62619]: DEBUG nova.virt.hardware [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1488.165577] env[62619]: DEBUG nova.virt.hardware [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1488.169025] env[62619]: DEBUG nova.virt.hardware [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1488.169025] env[62619]: DEBUG nova.virt.hardware [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1488.169025] env[62619]: DEBUG nova.virt.hardware [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1488.169025] env[62619]: DEBUG nova.virt.hardware [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1488.169025] env[62619]: DEBUG nova.virt.hardware [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1488.169025] env[62619]: DEBUG nova.virt.hardware [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1488.169025] env[62619]: DEBUG nova.virt.hardware [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1488.169025] env[62619]: DEBUG nova.virt.hardware [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1488.169025] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0229365-9f60-43a2-8ab0-e7c5c3cc6a02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.181248] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79515a98-eb8e-4689-82fe-a9b1c6e5127b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.346158] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.346906] env[62619]: DEBUG nova.compute.manager [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Instance network_info: |[{"id": "7df13a55-7d53-41b9-9489-591516bda30c", "address": "fa:16:3e:aa:15:ba", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df13a55-7d", "ovs_interfaceid": "7df13a55-7d53-41b9-9489-591516bda30c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1488.347136] env[62619]: DEBUG 
nova.virt.vmwareapi.vmops [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:15:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '816c6e38-e200-4544-8c5b-9fc3e16c5761', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7df13a55-7d53-41b9-9489-591516bda30c', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1488.356796] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Creating folder: Project (c82fb42e93ff479b971f49eb92f50832). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1488.360627] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8f677e1-add2-4116-b50c-fabf3c511463 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.374455] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777469, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542782} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.374721] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 060427a2-e724-4c51-879e-675154ae5df2/060427a2-e724-4c51-879e-675154ae5df2.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1488.374928] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1488.375209] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f49f30d7-c96a-4d78-a3bb-02984eea27fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.380997] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Created folder: Project (c82fb42e93ff479b971f49eb92f50832) in parent group-v368875. [ 1488.381129] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Creating folder: Instances. Parent ref: group-v368986. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1488.381368] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0cedbd0-1318-4d35-a693-99daf2949353 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.385302] env[62619]: DEBUG nova.scheduler.client.report [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1488.390152] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1488.390152] env[62619]: value = "task-1777472" [ 1488.390152] env[62619]: _type = "Task" [ 1488.390152] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.402658] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777472, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.404419] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Created folder: Instances in parent group-v368986. [ 1488.405508] env[62619]: DEBUG oslo.service.loopingcall [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1488.405508] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1488.405508] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29df4623-2b61-485c-86f6-b89df8c30dcf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.430879] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1488.430879] env[62619]: value = "task-1777474" [ 1488.430879] env[62619]: _type = "Task" [ 1488.430879] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.442926] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777474, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.491537] env[62619]: DEBUG nova.compute.manager [req-29e750f5-df84-4698-bcf2-f405833ef0af req-67a67558-d579-4b41-8401-dc9d25f1e0de service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Received event network-changed-7df13a55-7d53-41b9-9489-591516bda30c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1488.491818] env[62619]: DEBUG nova.compute.manager [req-29e750f5-df84-4698-bcf2-f405833ef0af req-67a67558-d579-4b41-8401-dc9d25f1e0de service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Refreshing instance network info cache due to event network-changed-7df13a55-7d53-41b9-9489-591516bda30c. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1488.492387] env[62619]: DEBUG oslo_concurrency.lockutils [req-29e750f5-df84-4698-bcf2-f405833ef0af req-67a67558-d579-4b41-8401-dc9d25f1e0de service nova] Acquiring lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.493105] env[62619]: DEBUG oslo_concurrency.lockutils [req-29e750f5-df84-4698-bcf2-f405833ef0af req-67a67558-d579-4b41-8401-dc9d25f1e0de service nova] Acquired lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.493823] env[62619]: DEBUG nova.network.neutron [req-29e750f5-df84-4698-bcf2-f405833ef0af req-67a67558-d579-4b41-8401-dc9d25f1e0de service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Refreshing network info cache for port 7df13a55-7d53-41b9-9489-591516bda30c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1488.573320] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777470, 'name': Rename_Task, 'duration_secs': 0.289606} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.573486] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1488.574243] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5afd1022-72bf-4178-a7ec-a79abc7cc9b9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.583501] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1488.583501] env[62619]: value = "task-1777475" [ 1488.583501] env[62619]: _type = "Task" [ 1488.583501] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.595441] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777475, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.646741] env[62619]: INFO nova.compute.manager [-] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Took 1.04 seconds to deallocate network for instance. [ 1488.808718] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Acquiring lock "79dfeb2b-06d0-45f1-b97e-10fa4f00d282" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.809151] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Lock "79dfeb2b-06d0-45f1-b97e-10fa4f00d282" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.891536] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.799s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1488.892160] env[62619]: DEBUG nova.compute.manager [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1488.895701] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.870s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.895928] env[62619]: DEBUG nova.objects.instance [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Lazy-loading 'resources' on Instance uuid 7c058337-1684-4553-8e96-dd2cd1814a15 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1488.907247] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777472, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100477} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.907648] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1488.908510] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0838ca4f-d2b0-4664-8cdb-8f8f23ebfb80 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.951390] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 060427a2-e724-4c51-879e-675154ae5df2/060427a2-e724-4c51-879e-675154ae5df2.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1488.951622] env[62619]: DEBUG nova.network.neutron [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Successfully updated port: 3aadd3d9-5b8f-4d90-94a3-d818bbac3830 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1488.954037] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-889b3b35-60e4-4fe2-803a-b0fb571e291b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.975349] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "refresh_cache-e32cb991-a018-4b55-8cdf-378e212c8434" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 
1488.975614] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "refresh_cache-e32cb991-a018-4b55-8cdf-378e212c8434" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.975794] env[62619]: DEBUG nova.network.neutron [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1488.991360] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777474, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.993453] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1488.993453] env[62619]: value = "task-1777476" [ 1488.993453] env[62619]: _type = "Task" [ 1488.993453] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.005210] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777476, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.094897] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777475, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.159161] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.321389] env[62619]: DEBUG nova.network.neutron [req-29e750f5-df84-4698-bcf2-f405833ef0af req-67a67558-d579-4b41-8401-dc9d25f1e0de service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updated VIF entry in instance network info cache for port 7df13a55-7d53-41b9-9489-591516bda30c. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1489.321925] env[62619]: DEBUG nova.network.neutron [req-29e750f5-df84-4698-bcf2-f405833ef0af req-67a67558-d579-4b41-8401-dc9d25f1e0de service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance_info_cache with network_info: [{"id": "7df13a55-7d53-41b9-9489-591516bda30c", "address": "fa:16:3e:aa:15:ba", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df13a55-7d", "ovs_interfaceid": "7df13a55-7d53-41b9-9489-591516bda30c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.397909] env[62619]: DEBUG nova.compute.utils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1489.402580] env[62619]: DEBUG nova.compute.manager [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1489.402580] env[62619]: DEBUG nova.network.neutron [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1489.466572] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777474, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.509531] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777476, 'name': ReconfigVM_Task, 'duration_secs': 0.450209} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.509921] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 060427a2-e724-4c51-879e-675154ae5df2/060427a2-e724-4c51-879e-675154ae5df2.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1489.510757] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-331a84e7-b2bb-4c75-8742-6069e99bd969 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.518116] env[62619]: DEBUG nova.network.neutron [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1489.522741] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1489.522741] env[62619]: value = "task-1777477" [ 1489.522741] env[62619]: _type = "Task" [ 1489.522741] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.539016] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777477, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.600907] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777475, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.696858] env[62619]: DEBUG nova.policy [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25159d73422b45dbbe4bab2b2a835055', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df50ba9d97ac4c059077c87f9cfdb719', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1489.705975] env[62619]: DEBUG nova.network.neutron [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Updating instance_info_cache with network_info: [{"id": "3aadd3d9-5b8f-4d90-94a3-d818bbac3830", "address": "fa:16:3e:7e:6a:12", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3aadd3d9-5b", "ovs_interfaceid": "3aadd3d9-5b8f-4d90-94a3-d818bbac3830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.825445] env[62619]: DEBUG oslo_concurrency.lockutils [req-29e750f5-df84-4698-bcf2-f405833ef0af req-67a67558-d579-4b41-8401-dc9d25f1e0de service nova] Releasing lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1489.906510] env[62619]: DEBUG nova.compute.manager [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1489.970928] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777474, 'name': CreateVM_Task, 'duration_secs': 1.108248} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.971160] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1489.971870] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.972114] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.972458] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1489.972794] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-459d645f-e56c-4bae-a378-8c91682ab35c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.977911] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74db0257-189d-4605-9929-85b41f052aeb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.984831] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1489.984831] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dd4213-8bbd-0331-2078-7e464d6721a4" [ 1489.984831] env[62619]: _type = "Task" [ 1489.984831] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.989612] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d924987-cf1c-4116-8d21-6068316d753b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.996167] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dd4213-8bbd-0331-2078-7e464d6721a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.031601] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790c08a0-c651-4cf9-a0fd-94523f6d17e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.040674] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777477, 'name': Rename_Task, 'duration_secs': 0.194573} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.042895] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1490.043247] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b8d8e3c-7c6d-4b6b-ae9a-55e2e1726199 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.045961] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6b2a45-431c-45b2-b160-f70aaadc992d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.065508] env[62619]: DEBUG nova.compute.provider_tree [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1490.065667] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1490.065667] env[62619]: value = "task-1777478" [ 1490.065667] env[62619]: _type = "Task" [ 1490.065667] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.075111] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777478, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.096538] env[62619]: DEBUG oslo_vmware.api [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777475, 'name': PowerOnVM_Task, 'duration_secs': 1.061002} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.096877] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1490.097207] env[62619]: INFO nova.compute.manager [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Took 10.08 seconds to spawn the instance on the hypervisor. [ 1490.097419] env[62619]: DEBUG nova.compute.manager [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1490.098325] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0addd3-a454-4e87-91b0-67af19434740 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.155682] env[62619]: DEBUG nova.network.neutron [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Successfully created port: 2cb095b0-1732-485d-a7ea-c306699fa81f {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1490.211413] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "refresh_cache-e32cb991-a018-4b55-8cdf-378e212c8434" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.211569] env[62619]: DEBUG nova.compute.manager [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Instance network_info: |[{"id": "3aadd3d9-5b8f-4d90-94a3-d818bbac3830", "address": "fa:16:3e:7e:6a:12", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3aadd3d9-5b", "ovs_interfaceid": "3aadd3d9-5b8f-4d90-94a3-d818bbac3830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1490.212018] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:6a:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '950a2f67-7668-4376-9d48-b38dca033c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3aadd3d9-5b8f-4d90-94a3-d818bbac3830', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1490.219845] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Creating folder: Project (7e11e1bca0c747fd8b4a0ca3e220ba4e). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1490.220160] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47dc6b78-65ef-4233-8dfe-a94749e69fb3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.234183] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Created folder: Project (7e11e1bca0c747fd8b4a0ca3e220ba4e) in parent group-v368875. [ 1490.234888] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Creating folder: Instances. Parent ref: group-v368989. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1490.234888] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eeb10d16-d5cd-4c22-b2c3-a962d57a8528 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.248092] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Created folder: Instances in parent group-v368989. [ 1490.248280] env[62619]: DEBUG oslo.service.loopingcall [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1490.248436] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1490.248649] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7b618df-c452-473a-ad75-acd41afa13fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.271051] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1490.271051] env[62619]: value = "task-1777481" [ 1490.271051] env[62619]: _type = "Task" [ 1490.271051] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.280105] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777481, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.497295] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dd4213-8bbd-0331-2078-7e464d6721a4, 'name': SearchDatastore_Task, 'duration_secs': 0.020735} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.497295] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.497295] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1490.497295] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.497295] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.497295] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1490.497295] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b5e4fc1-44ba-4cbb-97c1-7ea84008a736 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.512024] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1490.512024] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca 
tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1490.512705] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81139b2b-6a5c-49cb-8b73-52591a961d8b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.530961] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1490.530961] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524c1b5e-58d4-1f7e-6a1f-526a7be17e37" [ 1490.530961] env[62619]: _type = "Task" [ 1490.530961] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.544449] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524c1b5e-58d4-1f7e-6a1f-526a7be17e37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.572190] env[62619]: DEBUG nova.scheduler.client.report [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1490.582105] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777478, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.624434] env[62619]: INFO nova.compute.manager [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Took 48.34 seconds to build instance. 
[ 1490.663221] env[62619]: DEBUG nova.compute.manager [req-d5444de1-6635-45d3-a8e3-cfe37ffafe3a req-a5ab1275-ae30-4b19-af60-6c4c9bbef6f8 service nova] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Received event network-vif-plugged-3aadd3d9-5b8f-4d90-94a3-d818bbac3830 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1490.663614] env[62619]: DEBUG oslo_concurrency.lockutils [req-d5444de1-6635-45d3-a8e3-cfe37ffafe3a req-a5ab1275-ae30-4b19-af60-6c4c9bbef6f8 service nova] Acquiring lock "e32cb991-a018-4b55-8cdf-378e212c8434-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.663840] env[62619]: DEBUG oslo_concurrency.lockutils [req-d5444de1-6635-45d3-a8e3-cfe37ffafe3a req-a5ab1275-ae30-4b19-af60-6c4c9bbef6f8 service nova] Lock "e32cb991-a018-4b55-8cdf-378e212c8434-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.663840] env[62619]: DEBUG oslo_concurrency.lockutils [req-d5444de1-6635-45d3-a8e3-cfe37ffafe3a req-a5ab1275-ae30-4b19-af60-6c4c9bbef6f8 service nova] Lock "e32cb991-a018-4b55-8cdf-378e212c8434-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.664224] env[62619]: DEBUG nova.compute.manager [req-d5444de1-6635-45d3-a8e3-cfe37ffafe3a req-a5ab1275-ae30-4b19-af60-6c4c9bbef6f8 service nova] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] No waiting events found dispatching network-vif-plugged-3aadd3d9-5b8f-4d90-94a3-d818bbac3830 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1490.665025] env[62619]: WARNING nova.compute.manager [req-d5444de1-6635-45d3-a8e3-cfe37ffafe3a req-a5ab1275-ae30-4b19-af60-6c4c9bbef6f8 service nova] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Received unexpected event network-vif-plugged-3aadd3d9-5b8f-4d90-94a3-d818bbac3830 for instance with vm_state building and task_state spawning. [ 1490.665220] env[62619]: DEBUG nova.compute.manager [req-d5444de1-6635-45d3-a8e3-cfe37ffafe3a req-a5ab1275-ae30-4b19-af60-6c4c9bbef6f8 service nova] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Received event network-changed-3aadd3d9-5b8f-4d90-94a3-d818bbac3830 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1490.665377] env[62619]: DEBUG nova.compute.manager [req-d5444de1-6635-45d3-a8e3-cfe37ffafe3a req-a5ab1275-ae30-4b19-af60-6c4c9bbef6f8 service nova] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Refreshing instance network info cache due to event network-changed-3aadd3d9-5b8f-4d90-94a3-d818bbac3830. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1490.665560] env[62619]: DEBUG oslo_concurrency.lockutils [req-d5444de1-6635-45d3-a8e3-cfe37ffafe3a req-a5ab1275-ae30-4b19-af60-6c4c9bbef6f8 service nova] Acquiring lock "refresh_cache-e32cb991-a018-4b55-8cdf-378e212c8434" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.665692] env[62619]: DEBUG oslo_concurrency.lockutils [req-d5444de1-6635-45d3-a8e3-cfe37ffafe3a req-a5ab1275-ae30-4b19-af60-6c4c9bbef6f8 service nova] Acquired lock "refresh_cache-e32cb991-a018-4b55-8cdf-378e212c8434" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.665844] env[62619]: DEBUG nova.network.neutron [req-d5444de1-6635-45d3-a8e3-cfe37ffafe3a req-a5ab1275-ae30-4b19-af60-6c4c9bbef6f8 service nova] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Refreshing network info cache for port 3aadd3d9-5b8f-4d90-94a3-d818bbac3830 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1490.782700] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777481, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.919475] env[62619]: DEBUG nova.compute.manager [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1490.957502] env[62619]: DEBUG nova.virt.hardware [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=<?>,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-11T22:34:16Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1490.957502] env[62619]: DEBUG nova.virt.hardware [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1490.957502] env[62619]: DEBUG nova.virt.hardware [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1490.957502] env[62619]: DEBUG nova.virt.hardware [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e 
tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1490.957502] env[62619]: DEBUG nova.virt.hardware [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1490.957502] env[62619]: DEBUG nova.virt.hardware [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1490.957813] env[62619]: DEBUG nova.virt.hardware [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1490.957884] env[62619]: DEBUG nova.virt.hardware [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1490.958067] env[62619]: DEBUG nova.virt.hardware [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1490.958248] env[62619]: DEBUG nova.virt.hardware [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1490.958418] env[62619]: DEBUG nova.virt.hardware [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1490.959347] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f15769-af62-436a-a9cb-264ef3eb0c32 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.970345] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfea821-fb5e-4090-91bd-84cef47a9d22 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.046248] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524c1b5e-58d4-1f7e-6a1f-526a7be17e37, 'name': SearchDatastore_Task, 
'duration_secs': 0.013711} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.047128] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7f84315-0a4a-4e19-a0f9-ffe053f131ad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.053845] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1491.053845] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b686f5-eceb-9774-c437-a3c5aeca3dcf" [ 1491.053845] env[62619]: _type = "Task" [ 1491.053845] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.062790] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b686f5-eceb-9774-c437-a3c5aeca3dcf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.075895] env[62619]: DEBUG oslo_vmware.api [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777478, 'name': PowerOnVM_Task, 'duration_secs': 0.537555} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.076084] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1491.076294] env[62619]: INFO nova.compute.manager [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Took 8.30 seconds to spawn the instance on the hypervisor. 
[ 1491.076466] env[62619]: DEBUG nova.compute.manager [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1491.077291] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.182s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.079996] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de635a55-4db1-4398-9fa3-a74043042006 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.083528] env[62619]: DEBUG oslo_concurrency.lockutils [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.375s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.085493] env[62619]: INFO nova.compute.claims [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1491.088985] env[62619]: DEBUG nova.compute.manager [req-39bcb487-f09c-4f82-8467-a157c886f67c req-c8b9d787-89a9-437a-9a16-3294c7cc7844 service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Received event network-changed-1e21d673-0db1-49d8-b86e-c8b8568b7452 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1491.089158] env[62619]: DEBUG nova.compute.manager [req-39bcb487-f09c-4f82-8467-a157c886f67c req-c8b9d787-89a9-437a-9a16-3294c7cc7844 service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Refreshing instance network info cache due to event network-changed-1e21d673-0db1-49d8-b86e-c8b8568b7452. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1491.089531] env[62619]: DEBUG oslo_concurrency.lockutils [req-39bcb487-f09c-4f82-8467-a157c886f67c req-c8b9d787-89a9-437a-9a16-3294c7cc7844 service nova] Acquiring lock "refresh_cache-cef20063-96f0-46cc-9f7d-4436b60216c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.089596] env[62619]: DEBUG oslo_concurrency.lockutils [req-39bcb487-f09c-4f82-8467-a157c886f67c req-c8b9d787-89a9-437a-9a16-3294c7cc7844 service nova] Acquired lock "refresh_cache-cef20063-96f0-46cc-9f7d-4436b60216c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.089788] env[62619]: DEBUG nova.network.neutron [req-39bcb487-f09c-4f82-8467-a157c886f67c req-c8b9d787-89a9-437a-9a16-3294c7cc7844 service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Refreshing network info cache for port 1e21d673-0db1-49d8-b86e-c8b8568b7452 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1491.113019] env[62619]: INFO nova.scheduler.client.report [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Deleted allocations for instance 7c058337-1684-4553-8e96-dd2cd1814a15 [ 1491.128171] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff1f8f13-5a8c-442d-b2b2-2a2af1535846 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "cef20063-96f0-46cc-9f7d-4436b60216c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 81.872s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.291192] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777481, 'name': CreateVM_Task, 'duration_secs': 0.538242} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.291370] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1491.292140] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.292337] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.292650] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1491.292930] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a6c0861-0e44-40e3-aa04-8f5339692832 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.298819] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1491.298819] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a1a9c8-8a65-79f6-65e0-40a6b8661c78" [ 1491.298819] env[62619]: _type = "Task" [ 1491.298819] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.307899] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a1a9c8-8a65-79f6-65e0-40a6b8661c78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.565217] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b686f5-eceb-9774-c437-a3c5aeca3dcf, 'name': SearchDatastore_Task, 'duration_secs': 0.030228} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.566186] env[62619]: DEBUG nova.network.neutron [req-d5444de1-6635-45d3-a8e3-cfe37ffafe3a req-a5ab1275-ae30-4b19-af60-6c4c9bbef6f8 service nova] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Updated VIF entry in instance network info cache for port 3aadd3d9-5b8f-4d90-94a3-d818bbac3830. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1491.566562] env[62619]: DEBUG nova.network.neutron [req-d5444de1-6635-45d3-a8e3-cfe37ffafe3a req-a5ab1275-ae30-4b19-af60-6c4c9bbef6f8 service nova] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Updating instance_info_cache with network_info: [{"id": "3aadd3d9-5b8f-4d90-94a3-d818bbac3830", "address": "fa:16:3e:7e:6a:12", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3aadd3d9-5b", "ovs_interfaceid": "3aadd3d9-5b8f-4d90-94a3-d818bbac3830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.567835] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.568131] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] da806d3f-79f0-4188-a2d8-0beeb9dfec1a/da806d3f-79f0-4188-a2d8-0beeb9dfec1a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1491.568425] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ebdb4730-bd98-4f64-a18c-d16cbaddad7e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.576794] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1491.576794] env[62619]: value = "task-1777482" [ 1491.576794] env[62619]: _type = "Task" [ 1491.576794] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.586476] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777482, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.610870] env[62619]: INFO nova.compute.manager [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Took 46.59 seconds to build instance. [ 1491.625724] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ed088d-7a04-4d6c-9161-7c77c20b4adf tempest-FloatingIPsAssociationNegativeTestJSON-1190730756 tempest-FloatingIPsAssociationNegativeTestJSON-1190730756-project-member] Lock "7c058337-1684-4553-8e96-dd2cd1814a15" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 33.365s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.630774] env[62619]: DEBUG nova.compute.manager [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1491.812265] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a1a9c8-8a65-79f6-65e0-40a6b8661c78, 'name': SearchDatastore_Task, 'duration_secs': 0.019639} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.814877] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.815137] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1491.815371] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.815512] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.815690] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1491.816076] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c5756c7-9db6-4b81-8bdd-dbf59d8da925 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.827413] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1491.827603] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1491.828348] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-503d5298-2959-457d-bb0a-70e05d17115c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.835266] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1491.835266] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523982c2-f9ff-a7cd-164f-e27c224d9460" [ 1491.835266] env[62619]: _type = "Task" [ 1491.835266] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.844614] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523982c2-f9ff-a7cd-164f-e27c224d9460, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.859021] env[62619]: DEBUG nova.network.neutron [req-39bcb487-f09c-4f82-8467-a157c886f67c req-c8b9d787-89a9-437a-9a16-3294c7cc7844 service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Updated VIF entry in instance network info cache for port 1e21d673-0db1-49d8-b86e-c8b8568b7452. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1491.859738] env[62619]: DEBUG nova.network.neutron [req-39bcb487-f09c-4f82-8467-a157c886f67c req-c8b9d787-89a9-437a-9a16-3294c7cc7844 service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Updating instance_info_cache with network_info: [{"id": "1e21d673-0db1-49d8-b86e-c8b8568b7452", "address": "fa:16:3e:04:a9:2c", "network": {"id": "f93b590d-39d9-42a4-8c3f-d05fa75fdf10", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1677764334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4a61b4c4b2b42a1933ef647b146b530", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e21d673-0d", "ovs_interfaceid": "1e21d673-0db1-49d8-b86e-c8b8568b7452", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.039832] env[62619]: DEBUG nova.network.neutron [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] 
Successfully updated port: 2cb095b0-1732-485d-a7ea-c306699fa81f {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1492.072275] env[62619]: DEBUG oslo_concurrency.lockutils [req-d5444de1-6635-45d3-a8e3-cfe37ffafe3a req-a5ab1275-ae30-4b19-af60-6c4c9bbef6f8 service nova] Releasing lock "refresh_cache-e32cb991-a018-4b55-8cdf-378e212c8434" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.090270] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777482, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.115403] env[62619]: DEBUG oslo_concurrency.lockutils [None req-91416d2d-c230-4d23-baba-e0c7155cde5e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "060427a2-e724-4c51-879e-675154ae5df2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 80.749s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.150664] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.354016] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523982c2-f9ff-a7cd-164f-e27c224d9460, 'name': SearchDatastore_Task, 'duration_secs': 0.018025} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.358290] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-148e2a53-ac28-4205-a73c-64dc9de776be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.362447] env[62619]: DEBUG oslo_concurrency.lockutils [req-39bcb487-f09c-4f82-8467-a157c886f67c req-c8b9d787-89a9-437a-9a16-3294c7cc7844 service nova] Releasing lock "refresh_cache-cef20063-96f0-46cc-9f7d-4436b60216c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.365817] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1492.365817] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52504f93-f058-34a5-b42a-f1d7bf521bc4" [ 1492.365817] env[62619]: _type = "Task" [ 1492.365817] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.376332] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52504f93-f058-34a5-b42a-f1d7bf521bc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.542592] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "refresh_cache-1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.542748] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "refresh_cache-1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.542961] env[62619]: DEBUG nova.network.neutron [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1492.593061] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777482, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.741811} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.596642] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] da806d3f-79f0-4188-a2d8-0beeb9dfec1a/da806d3f-79f0-4188-a2d8-0beeb9dfec1a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1492.597080] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1492.597478] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-86b6bed4-09d9-4c18-88bd-4a7d5a705a87 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.606338] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1492.606338] env[62619]: value = "task-1777483" [ 1492.606338] env[62619]: _type = "Task" [ 1492.606338] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.620240] env[62619]: DEBUG nova.compute.manager [None req-667f8000-b1be-4d20-84a6-8d0f926215cc tempest-ServersListShow296Test-20887118 tempest-ServersListShow296Test-20887118-project-member] [instance: 5232c3cc-29eb-43e0-91e3-763b778c3183] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1492.625184] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777483, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.666972] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f88297-eb90-45b2-9b8c-54c9344d39b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.673585] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c689b899-bd66-4fa2-aea6-d35071804b28 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.709710] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e0df84-ff3a-40c7-ac76-a53f8ce3874d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.713507] env[62619]: DEBUG nova.compute.manager [req-84a8ff4f-7a2e-476e-b957-b9664ff3c3a8 req-506fa4bb-bb2b-442d-b538-101e8712cdaf service nova] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Received event network-vif-plugged-2cb095b0-1732-485d-a7ea-c306699fa81f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1492.713756] env[62619]: DEBUG oslo_concurrency.lockutils [req-84a8ff4f-7a2e-476e-b957-b9664ff3c3a8 req-506fa4bb-bb2b-442d-b538-101e8712cdaf service nova] Acquiring lock "1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.713992] env[62619]: DEBUG oslo_concurrency.lockutils [req-84a8ff4f-7a2e-476e-b957-b9664ff3c3a8 req-506fa4bb-bb2b-442d-b538-101e8712cdaf service nova] Lock "1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.714188] env[62619]: DEBUG oslo_concurrency.lockutils [req-84a8ff4f-7a2e-476e-b957-b9664ff3c3a8 req-506fa4bb-bb2b-442d-b538-101e8712cdaf service nova] Lock "1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.714352] env[62619]: DEBUG nova.compute.manager [req-84a8ff4f-7a2e-476e-b957-b9664ff3c3a8 req-506fa4bb-bb2b-442d-b538-101e8712cdaf service nova] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] No waiting events found dispatching network-vif-plugged-2cb095b0-1732-485d-a7ea-c306699fa81f {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1492.714513] env[62619]: WARNING nova.compute.manager [req-84a8ff4f-7a2e-476e-b957-b9664ff3c3a8 req-506fa4bb-bb2b-442d-b538-101e8712cdaf service nova] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Received unexpected event network-vif-plugged-2cb095b0-1732-485d-a7ea-c306699fa81f for instance with vm_state building and task_state spawning. 
[ 1492.714691] env[62619]: DEBUG nova.compute.manager [req-84a8ff4f-7a2e-476e-b957-b9664ff3c3a8 req-506fa4bb-bb2b-442d-b538-101e8712cdaf service nova] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Received event network-changed-2cb095b0-1732-485d-a7ea-c306699fa81f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1492.714860] env[62619]: DEBUG nova.compute.manager [req-84a8ff4f-7a2e-476e-b957-b9664ff3c3a8 req-506fa4bb-bb2b-442d-b538-101e8712cdaf service nova] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Refreshing instance network info cache due to event network-changed-2cb095b0-1732-485d-a7ea-c306699fa81f. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1492.715055] env[62619]: DEBUG oslo_concurrency.lockutils [req-84a8ff4f-7a2e-476e-b957-b9664ff3c3a8 req-506fa4bb-bb2b-442d-b538-101e8712cdaf service nova] Acquiring lock "refresh_cache-1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.722376] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a94d4a2-357a-4f15-ade6-76378cfcf4c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.740200] env[62619]: DEBUG nova.compute.provider_tree [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1492.877612] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52504f93-f058-34a5-b42a-f1d7bf521bc4, 'name': SearchDatastore_Task, 'duration_secs': 0.018209} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.877843] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.878116] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e32cb991-a018-4b55-8cdf-378e212c8434/e32cb991-a018-4b55-8cdf-378e212c8434.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1492.878397] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9876ec44-1008-4d82-9d2c-bb13b20d6bbc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.887579] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1492.887579] env[62619]: value = "task-1777484" [ 1492.887579] env[62619]: _type = "Task" [ 1492.887579] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.900132] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777484, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.117786] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777483, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140631} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.118081] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1493.118917] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe5225c-1f19-4075-8736-16de926178fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.124656] env[62619]: DEBUG nova.compute.manager [None req-667f8000-b1be-4d20-84a6-8d0f926215cc tempest-ServersListShow296Test-20887118 tempest-ServersListShow296Test-20887118-project-member] [instance: 5232c3cc-29eb-43e0-91e3-763b778c3183] Instance disappeared before build. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 1493.126410] env[62619]: DEBUG nova.network.neutron [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1493.150417] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] da806d3f-79f0-4188-a2d8-0beeb9dfec1a/da806d3f-79f0-4188-a2d8-0beeb9dfec1a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1493.150655] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0eca9f9-7a87-40b7-ab46-d09fe06c6c6c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.179301] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1493.179301] env[62619]: value = "task-1777485" [ 1493.179301] env[62619]: _type = "Task" [ 1493.179301] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.191192] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777485, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.246927] env[62619]: DEBUG nova.scheduler.client.report [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1493.395176] env[62619]: DEBUG nova.network.neutron [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Updating instance_info_cache with network_info: [{"id": "2cb095b0-1732-485d-a7ea-c306699fa81f", "address": "fa:16:3e:65:c5:63", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cb095b0-17", "ovs_interfaceid": "2cb095b0-1732-485d-a7ea-c306699fa81f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.402911] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777484, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.660199] env[62619]: DEBUG oslo_concurrency.lockutils [None req-667f8000-b1be-4d20-84a6-8d0f926215cc tempest-ServersListShow296Test-20887118 tempest-ServersListShow296Test-20887118-project-member] Lock "5232c3cc-29eb-43e0-91e3-763b778c3183" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 17.481s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.692020] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777485, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.755933] env[62619]: DEBUG oslo_concurrency.lockutils [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.672s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.756735] env[62619]: DEBUG nova.compute.manager [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1493.760070] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.024s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.760223] env[62619]: DEBUG nova.objects.instance [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lazy-loading 'resources' on Instance uuid 6be4f813-7171-4515-a728-5cf34665205a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1493.902492] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "refresh_cache-1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.902833] env[62619]: DEBUG nova.compute.manager [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Instance network_info: |[{"id": "2cb095b0-1732-485d-a7ea-c306699fa81f", "address": "fa:16:3e:65:c5:63", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cb095b0-17", "ovs_interfaceid": "2cb095b0-1732-485d-a7ea-c306699fa81f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1493.903217] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777484, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.882606} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.903532] env[62619]: DEBUG oslo_concurrency.lockutils [req-84a8ff4f-7a2e-476e-b957-b9664ff3c3a8 req-506fa4bb-bb2b-442d-b538-101e8712cdaf service nova] Acquired lock "refresh_cache-1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.903726] env[62619]: DEBUG nova.network.neutron [req-84a8ff4f-7a2e-476e-b957-b9664ff3c3a8 req-506fa4bb-bb2b-442d-b538-101e8712cdaf service nova] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Refreshing network info cache for port 2cb095b0-1732-485d-a7ea-c306699fa81f {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1493.904968] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:c5:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2cb095b0-1732-485d-a7ea-c306699fa81f', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1493.912808] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Creating folder: Project (df50ba9d97ac4c059077c87f9cfdb719). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1493.913023] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e32cb991-a018-4b55-8cdf-378e212c8434/e32cb991-a018-4b55-8cdf-378e212c8434.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1493.913434] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1493.916579] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46b8daa6-19d6-4875-833e-f609cde0655e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.918376] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-24192bbf-1e21-4499-9385-31affb397670 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.930028] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1493.930028] env[62619]: value = "task-1777487" [ 1493.930028] env[62619]: _type = "Task" [ 1493.930028] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.935462] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Created folder: Project (df50ba9d97ac4c059077c87f9cfdb719) in parent group-v368875. [ 1493.935703] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Creating folder: Instances. Parent ref: group-v368992. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1493.936015] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88f6599d-d677-4820-885a-98e609109144 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.941811] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777487, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.955078] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Created folder: Instances in parent group-v368992. 
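The entries above and below trace the vCenter task lifecycle that recurs throughout this log: a call such as ExtendVirtualDisk_Task or ReconfigVM_Task returns a task handle, wait_for_task polls its progress (the "_poll_task ... progress is N%" lines), and completion is logged with a duration_secs value. The following is a minimal stdlib-only sketch of that poll-until-complete pattern; it is an illustration only, not the oslo.vmware implementation, and TaskInfo / fetch_task_info are hypothetical stand-ins for the vCenter task object and the property-collector read.

# Simplified sketch of the poll-until-complete pattern seen in these log
# entries (progress polled, then "completed successfully" with a duration).
# NOT oslo.vmware code; TaskInfo and fetch_task_info are hypothetical.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    task_id: str          # e.g. "task-1777487"
    state: str            # "running", "success" or "error"
    progress: int         # 0-100, as reported in the log
    error: str | None = None

def wait_for_task(fetch_task_info, task_id, poll_interval=0.5):
    """Poll a vCenter-style task until it succeeds or fails."""
    started = time.monotonic()
    while True:
        info = fetch_task_info(task_id)   # one round-trip to the server per poll
        if info.state == "success":
            duration = time.monotonic() - started
            print(f"Task {task_id} completed successfully in {duration:.6f}s")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        print(f"Task {task_id} progress is {info.progress}%")
        time.sleep(poll_interval)

The same loop shape explains why a single spawn produces many "progress is N%" lines per task in this log: each line is one polling round-trip.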
[ 1493.955363] env[62619]: DEBUG oslo.service.loopingcall [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1493.955574] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1493.955788] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24c752fc-3398-4805-af31-29c5126f6bca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.979611] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1493.979611] env[62619]: value = "task-1777489" [ 1493.979611] env[62619]: _type = "Task" [ 1493.979611] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.995323] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777489, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.164609] env[62619]: DEBUG nova.compute.manager [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1494.191245] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777485, 'name': ReconfigVM_Task, 'duration_secs': 0.737351} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.191549] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Reconfigured VM instance instance-00000027 to attach disk [datastore1] da806d3f-79f0-4188-a2d8-0beeb9dfec1a/da806d3f-79f0-4188-a2d8-0beeb9dfec1a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1494.192214] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dc66608d-dc57-421c-9b70-10fe0e0699f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.206293] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1494.206293] env[62619]: value = "task-1777490" [ 1494.206293] env[62619]: _type = "Task" [ 1494.206293] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.219154] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777490, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.249746] env[62619]: DEBUG nova.network.neutron [req-84a8ff4f-7a2e-476e-b957-b9664ff3c3a8 req-506fa4bb-bb2b-442d-b538-101e8712cdaf service nova] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Updated VIF entry in instance network info cache for port 2cb095b0-1732-485d-a7ea-c306699fa81f. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1494.250087] env[62619]: DEBUG nova.network.neutron [req-84a8ff4f-7a2e-476e-b957-b9664ff3c3a8 req-506fa4bb-bb2b-442d-b538-101e8712cdaf service nova] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Updating instance_info_cache with network_info: [{"id": "2cb095b0-1732-485d-a7ea-c306699fa81f", "address": "fa:16:3e:65:c5:63", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cb095b0-17", "ovs_interfaceid": "2cb095b0-1732-485d-a7ea-c306699fa81f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.262035] env[62619]: DEBUG nova.compute.utils [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1494.264378] env[62619]: DEBUG nova.compute.manager [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Not allocating networking since 'none' was specified. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1494.444965] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777487, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077211} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.444965] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1494.444965] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9527ca-1619-4795-aaa9-8b6f79851821 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.469987] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] e32cb991-a018-4b55-8cdf-378e212c8434/e32cb991-a018-4b55-8cdf-378e212c8434.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1494.475235] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe5f4979-22f7-46fc-8982-64b8cffd8a65 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.501230] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777489, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.502821] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1494.502821] env[62619]: value = "task-1777491" [ 1494.502821] env[62619]: _type = "Task" [ 1494.502821] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.517199] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777491, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.685980] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.721075] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777490, 'name': Rename_Task, 'duration_secs': 0.342151} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.721404] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1494.721627] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2101854e-9f5a-44d3-93aa-faa3f84ae292 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.731570] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1494.731570] env[62619]: value = "task-1777492" [ 1494.731570] env[62619]: _type = "Task" [ 1494.731570] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.743846] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777492, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.753050] env[62619]: DEBUG oslo_concurrency.lockutils [req-84a8ff4f-7a2e-476e-b957-b9664ff3c3a8 req-506fa4bb-bb2b-442d-b538-101e8712cdaf service nova] Releasing lock "refresh_cache-1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.768620] env[62619]: DEBUG nova.compute.manager [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1494.815016] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3bbc5cb-01f5-4014-b203-12496f948c57 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.828497] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cdb70a9-3465-42d0-9185-856d9140f0fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.862856] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da9134f-a5e7-4e82-a061-28396f5bf45c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.872697] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9f92d4-8bb5-46dd-9c2a-42af07eb20db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.890142] env[62619]: DEBUG nova.compute.provider_tree [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1495.004091] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777489, 'name': CreateVM_Task, 'duration_secs': 0.803274} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.007915] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1495.008700] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.008886] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.009333] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1495.010099] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa1cc4c8-bb24-4122-b416-41282e0594a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.015489] 
env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777491, 'name': ReconfigVM_Task, 'duration_secs': 0.404994} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.016635] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Reconfigured VM instance instance-00000028 to attach disk [datastore1] e32cb991-a018-4b55-8cdf-378e212c8434/e32cb991-a018-4b55-8cdf-378e212c8434.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1495.017292] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bcfed287-2a25-4bcb-a1ca-80651b20ab09 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.020135] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1495.020135] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52569634-3a98-5354-555e-8467421bcb96" [ 1495.020135] env[62619]: _type = "Task" [ 1495.020135] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.026952] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1495.026952] env[62619]: value = "task-1777493" [ 1495.026952] env[62619]: _type = "Task" [ 1495.026952] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.030404] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52569634-3a98-5354-555e-8467421bcb96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.042758] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777493, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.247988] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777492, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.393811] env[62619]: DEBUG nova.scheduler.client.report [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1495.506259] env[62619]: DEBUG nova.compute.manager [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1495.507258] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e119eba-5b03-482c-aca5-61cbd5f4e08d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.530809] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52569634-3a98-5354-555e-8467421bcb96, 'name': SearchDatastore_Task, 'duration_secs': 0.025769} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.536150] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.536150] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1495.536150] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.536150] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.536150] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1495.536150] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-869d88d3-f7e5-4e34-8107-672d98336770 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.545479] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777493, 'name': Rename_Task, 'duration_secs': 0.162594} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.545479] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1495.545479] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c11784d-a316-4c63-aa30-b00f168621c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.548385] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1495.548569] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1495.549598] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4356eb96-3359-4ee8-a09b-a70600fb652d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.557695] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1495.557695] env[62619]: value = "task-1777494" [ 1495.557695] env[62619]: _type = "Task" [ 1495.557695] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.559330] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1495.559330] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5242d33a-b037-dd77-c86b-064fac920235" [ 1495.559330] env[62619]: _type = "Task" [ 1495.559330] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.573864] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777494, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.576684] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5242d33a-b037-dd77-c86b-064fac920235, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.747515] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777492, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.780925] env[62619]: DEBUG nova.compute.manager [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1495.812711] env[62619]: DEBUG nova.virt.hardware [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1495.813073] env[62619]: DEBUG nova.virt.hardware [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1495.813240] env[62619]: DEBUG nova.virt.hardware [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1495.813422] env[62619]: DEBUG nova.virt.hardware [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1495.813566] env[62619]: DEBUG nova.virt.hardware [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1495.813709] env[62619]: DEBUG nova.virt.hardware [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1495.813963] env[62619]: DEBUG nova.virt.hardware [None 
req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1495.814222] env[62619]: DEBUG nova.virt.hardware [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1495.814371] env[62619]: DEBUG nova.virt.hardware [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1495.814537] env[62619]: DEBUG nova.virt.hardware [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1495.814692] env[62619]: DEBUG nova.virt.hardware [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1495.815663] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bb75bd-c658-4cc3-a4d1-ef41a6126326 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.827605] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a31a29-86a4-4dbb-88b3-1b481cc671f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.845203] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1495.851360] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Creating folder: Project (6e1b2ed01d3d47ed92b88c24eca73b64). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1495.851752] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-876d5c99-c699-494d-953f-d5c04ba868a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.866487] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Created folder: Project (6e1b2ed01d3d47ed92b88c24eca73b64) in parent group-v368875. 
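The nova.virt.hardware entries just above compute the desirable CPU topologies for the m1.nano flavor: 1 vCPU, preferred topology 0:0:0, maximum 65536:65536:65536, yielding the single possible topology 1:1:1. The sketch below illustrates the underlying enumeration idea (every sockets*cores*threads factorisation of the vCPU count within the limits); it is a simplified stand-in, not the actual nova.virt.hardware code.

# Simplified illustration of the topology enumeration traced above for
# m1.nano (1 vCPU, limits 65536:65536:65536 -> [1:1:1]). Not Nova's
# implementation, just the factorisation it reports.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

# For the flavor in this log:
print(possible_cpu_topologies(1, 65536, 65536, 65536))
# -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]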
[ 1495.866817] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Creating folder: Instances. Parent ref: group-v368995. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1495.867169] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09bbd46b-2111-48ee-981e-7177b4772618 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.881174] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Created folder: Instances in parent group-v368995. [ 1495.881528] env[62619]: DEBUG oslo.service.loopingcall [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1495.881936] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1495.882174] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0584e5c0-6e18-4678-8850-aac9b827069b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.901229] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.141s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.903605] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.736s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.904461] env[62619]: DEBUG nova.objects.instance [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Lazy-loading 'resources' on Instance uuid ef41dd29-1270-4071-9e89-20132131de2d {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1495.907199] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1495.907199] env[62619]: value = "task-1777497" [ 1495.907199] env[62619]: _type = "Task" [ 1495.907199] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.923606] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777497, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.929570] env[62619]: INFO nova.scheduler.client.report [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Deleted allocations for instance 6be4f813-7171-4515-a728-5cf34665205a [ 1496.019324] env[62619]: INFO nova.compute.manager [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] instance snapshotting [ 1496.023061] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a548c30-77b4-4f06-a303-3a8e62cb0669 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.045567] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd2186d-55c5-4645-b618-b9d219238e99 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.070274] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777494, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.077187] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5242d33a-b037-dd77-c86b-064fac920235, 'name': SearchDatastore_Task, 'duration_secs': 0.020375} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.078076] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27b9279f-59ce-42d3-b188-d19937f6a9d8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.086612] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1496.086612] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52da1fe3-b5e3-59cb-df30-1139aed7c3d9" [ 1496.086612] env[62619]: _type = "Task" [ 1496.086612] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.096861] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52da1fe3-b5e3-59cb-df30-1139aed7c3d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.251597] env[62619]: DEBUG oslo_vmware.api [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777492, 'name': PowerOnVM_Task, 'duration_secs': 1.123656} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.252060] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1496.253743] env[62619]: INFO nova.compute.manager [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Took 10.78 seconds to spawn the instance on the hypervisor. [ 1496.253743] env[62619]: DEBUG nova.compute.manager [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1496.254740] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab867b5-0631-4e38-864f-69ac9b10e5e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.424333] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777497, 'name': CreateVM_Task, 'duration_secs': 0.500321} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.424333] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1496.424333] env[62619]: DEBUG oslo_concurrency.lockutils [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.424333] env[62619]: DEBUG oslo_concurrency.lockutils [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.424579] env[62619]: DEBUG oslo_concurrency.lockutils [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1496.424810] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f2d0f10-777c-4be3-9087-45f4bec76e54 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.431447] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1496.431447] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5206fcde-c6fc-7f4a-8903-d50fc8e040df" [ 1496.431447] env[62619]: _type = "Task" [ 1496.431447] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.445390] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5206fcde-c6fc-7f4a-8903-d50fc8e040df, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.448408] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f8a6f3f-4be8-4a08-a066-d2d08fad1029 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "6be4f813-7171-4515-a728-5cf34665205a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.011s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.557801] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1496.558241] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f05cafa2-ae61-43f2-8f66-2201fdbf164a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.579911] env[62619]: DEBUG oslo_vmware.api [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777494, 'name': PowerOnVM_Task, 'duration_secs': 0.746823} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.580266] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1496.580266] env[62619]: value = "task-1777498" [ 1496.580266] env[62619]: _type = "Task" [ 1496.580266] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.580591] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1496.580802] env[62619]: INFO nova.compute.manager [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Took 8.45 seconds to spawn the instance on the hypervisor. 
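Throughout this section the resource tracker serialises claims and usage updates on the "compute_resources" lock, and lockutils reports both how long the caller waited to acquire it (e.g. "waited 30.736s") and how long it was held (e.g. "held 2.141s"). The sketch below reproduces that waited/held bookkeeping around a plain threading.Lock; it is a simplified stand-in for illustration, not the oslo.concurrency implementation.

# Stand-in for the 'acquired ... :: waited Xs' / '"released" ... :: held Ys'
# bookkeeping that lockutils logs around locks such as "compute_resources".
# Simplified sketch, not oslo.concurrency code.
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}

@contextmanager
def timed_lock(name: str, caller: str):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - t0 - waited
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

# Example mirroring the resource tracker entries in this log:
with timed_lock("compute_resources", "ResourceTracker.update_usage"):
    time.sleep(0.1)  # placeholder for the claim/usage update work

Long "waited" values like the 30-second waits above indicate contention on the shared lock rather than slow work inside it; the "held" value is the cost of the critical section itself.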
[ 1496.580979] env[62619]: DEBUG nova.compute.manager [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1496.582428] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9c161f-a12a-4d71-92ae-4ba1ea905eca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.605649] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777498, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.612461] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52da1fe3-b5e3-59cb-df30-1139aed7c3d9, 'name': SearchDatastore_Task, 'duration_secs': 0.016126} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.612461] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.612461] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c/1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1496.612461] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e240e9c0-defe-42b2-b2c8-4d51cfb271a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.626446] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1496.626446] env[62619]: value = "task-1777499" [ 1496.626446] env[62619]: _type = "Task" [ 1496.626446] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.640584] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777499, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.782360] env[62619]: INFO nova.compute.manager [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Took 40.69 seconds to build instance. [ 1496.953699] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5206fcde-c6fc-7f4a-8903-d50fc8e040df, 'name': SearchDatastore_Task, 'duration_secs': 0.022862} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.960056] env[62619]: DEBUG oslo_concurrency.lockutils [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.960056] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1496.960056] env[62619]: DEBUG oslo_concurrency.lockutils [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.960056] env[62619]: DEBUG oslo_concurrency.lockutils [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.960056] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1496.961625] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b179845b-f65c-4cfe-8b0c-cd79e2e8ee2c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.982470] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1496.982877] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 
tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1496.984461] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14c65a37-46eb-498a-aaa5-32c47ba3f5ad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.001469] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1497.001469] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52eeefb6-a186-8f90-66da-78452cc78ec7" [ 1497.001469] env[62619]: _type = "Task" [ 1497.001469] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.021092] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ce93b2-46dc-4715-ac46-faf3535ba6fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.031971] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52eeefb6-a186-8f90-66da-78452cc78ec7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.041238] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9774f67d-1222-41a4-bccc-c4524bf3229d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.090178] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0212cd-cae2-4b7f-b147-eb79aca39cf9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.126308] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777498, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.132287] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2b4e97-cd9c-48cc-ade2-0aa8980b0316 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.139505] env[62619]: INFO nova.compute.manager [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Took 37.83 seconds to build instance. [ 1497.156289] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777499, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.172315] env[62619]: DEBUG nova.compute.provider_tree [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1497.284872] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9ec9aa6c-8aa5-48bd-9395-604b3cceabca tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.752s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.525455] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52eeefb6-a186-8f90-66da-78452cc78ec7, 'name': SearchDatastore_Task, 'duration_secs': 0.048138} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.525455] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e971a1e8-b32a-4f48-882b-22c11e4846d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.535148] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1497.535148] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52968f7a-e7e5-3470-1314-9da16b16beda" [ 1497.535148] env[62619]: _type = "Task" [ 1497.535148] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.548671] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52968f7a-e7e5-3470-1314-9da16b16beda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.601195] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777498, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.652798] env[62619]: DEBUG oslo_concurrency.lockutils [None req-354648db-c574-4035-9426-251a33a15a9a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "e32cb991-a018-4b55-8cdf-378e212c8434" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.087s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.653679] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777499, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.714256} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.655440] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c/1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1497.655672] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1497.656220] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d51f389-4840-4208-b12f-f10d2943fdc2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.676863] env[62619]: DEBUG nova.scheduler.client.report [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1497.682330] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1497.682330] env[62619]: value = "task-1777500" [ 1497.682330] env[62619]: _type = "Task" [ 1497.682330] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.697419] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777500, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.793581] env[62619]: DEBUG nova.compute.manager [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1498.047575] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52968f7a-e7e5-3470-1314-9da16b16beda, 'name': SearchDatastore_Task, 'duration_secs': 0.01504} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.048014] env[62619]: DEBUG oslo_concurrency.lockutils [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1498.048436] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 7217d898-54ee-46ed-88fa-959c38e988e7/7217d898-54ee-46ed-88fa-959c38e988e7.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1498.048771] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5565b289-d45a-4e0f-a570-da1ff6f7984b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.057089] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1498.057089] env[62619]: value = "task-1777501" [ 1498.057089] env[62619]: _type = "Task" [ 1498.057089] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.067911] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777501, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.098778] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777498, 'name': CreateSnapshot_Task, 'duration_secs': 1.316714} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.098932] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1498.099882] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668a4b80-14b8-422f-872f-002edcbe1dc2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.157137] env[62619]: DEBUG nova.compute.manager [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1498.184535] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.281s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.189132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.703s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.191248] env[62619]: INFO nova.compute.claims [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1498.216812] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777500, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.23406} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.216812] env[62619]: INFO nova.scheduler.client.report [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Deleted allocations for instance ef41dd29-1270-4071-9e89-20132131de2d [ 1498.224384] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1498.224384] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4f74f9-eaf9-4285-afbe-efce9f938557 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.253837] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c/1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1498.254240] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f2e2169-666a-4daf-81c2-3e500a1b0d9f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.284017] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1498.284017] env[62619]: value = "task-1777502" [ 1498.284017] env[62619]: _type = "Task" [ 1498.284017] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.303118] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777502, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.321408] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.572548] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777501, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.624510] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1498.624559] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-aaafbbec-d69b-4d71-8da7-528a881674ce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.637989] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1498.637989] env[62619]: value = "task-1777503" [ 1498.637989] env[62619]: _type = "Task" [ 1498.637989] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.650018] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777503, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.688186] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.731597] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9946179b-06fc-4118-bb7f-e90f097e071e tempest-DeleteServersAdminTestJSON-1602165468 tempest-DeleteServersAdminTestJSON-1602165468-project-member] Lock "ef41dd29-1270-4071-9e89-20132131de2d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.976s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.802385] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777502, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.987268] env[62619]: DEBUG oslo_vmware.rw_handles [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529e31c6-4dc0-69ae-417d-f30ad4f01b10/disk-0.vmdk. 
{{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1498.988760] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da0b52a-7b04-4f6f-98e8-8957c444d380 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.998228] env[62619]: DEBUG oslo_vmware.rw_handles [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529e31c6-4dc0-69ae-417d-f30ad4f01b10/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1498.998228] env[62619]: ERROR oslo_vmware.rw_handles [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529e31c6-4dc0-69ae-417d-f30ad4f01b10/disk-0.vmdk due to incomplete transfer. [ 1498.998228] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c523afc5-57cb-493a-b4c3-be20f5f1fc4b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.010268] env[62619]: DEBUG oslo_vmware.rw_handles [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529e31c6-4dc0-69ae-417d-f30ad4f01b10/disk-0.vmdk. {{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1499.011716] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Uploaded image 71660a86-0f4d-4eef-8e06-761c0311eb57 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1499.015249] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1499.015961] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-37b210ba-1828-4b7e-a241-79404f786c6d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.029656] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1499.029656] env[62619]: value = "task-1777504" [ 1499.029656] env[62619]: _type = "Task" [ 1499.029656] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.040959] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777504, 'name': Destroy_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.068971] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777501, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.911332} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.070648] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 7217d898-54ee-46ed-88fa-959c38e988e7/7217d898-54ee-46ed-88fa-959c38e988e7.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1499.070648] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1499.070648] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-299acf85-140e-4b67-b0f0-3a60d0bf210f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.084476] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1499.084476] env[62619]: value = "task-1777505" [ 1499.084476] env[62619]: _type = "Task" [ 1499.084476] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.100677] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777505, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.154545] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777503, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.297030] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777502, 'name': ReconfigVM_Task, 'duration_secs': 0.824021} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.297221] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c/1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1499.298216] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1b1149b-51fa-453f-81af-16c5c4dbbcd5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.306391] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1499.306391] env[62619]: value = "task-1777508" [ 1499.306391] env[62619]: _type = "Task" [ 1499.306391] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.332668] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777508, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.547371] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777504, 'name': Destroy_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.599502] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777505, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092846} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.603077] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1499.605052] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a102f2-b9e2-40d7-82f4-ee4725e4d4d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.641192] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 7217d898-54ee-46ed-88fa-959c38e988e7/7217d898-54ee-46ed-88fa-959c38e988e7.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1499.644261] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d87204ef-359b-472a-9be0-cca7fe2790ce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.671791] env[62619]: DEBUG nova.compute.manager [req-203f0a21-e5c1-48d1-a277-254050f2d26f req-5836641a-0645-4f2a-a1e8-f1088d00d46a service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Received event network-changed-7df13a55-7d53-41b9-9489-591516bda30c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1499.672153] env[62619]: DEBUG nova.compute.manager [req-203f0a21-e5c1-48d1-a277-254050f2d26f req-5836641a-0645-4f2a-a1e8-f1088d00d46a service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Refreshing instance network info cache due to event network-changed-7df13a55-7d53-41b9-9489-591516bda30c. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1499.672514] env[62619]: DEBUG oslo_concurrency.lockutils [req-203f0a21-e5c1-48d1-a277-254050f2d26f req-5836641a-0645-4f2a-a1e8-f1088d00d46a service nova] Acquiring lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.672910] env[62619]: DEBUG oslo_concurrency.lockutils [req-203f0a21-e5c1-48d1-a277-254050f2d26f req-5836641a-0645-4f2a-a1e8-f1088d00d46a service nova] Acquired lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.673173] env[62619]: DEBUG nova.network.neutron [req-203f0a21-e5c1-48d1-a277-254050f2d26f req-5836641a-0645-4f2a-a1e8-f1088d00d46a service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Refreshing network info cache for port 7df13a55-7d53-41b9-9489-591516bda30c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1499.689924] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777503, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.692725] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1499.692725] env[62619]: value = "task-1777511" [ 1499.692725] env[62619]: _type = "Task" [ 1499.692725] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.702142] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777511, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.820505] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777508, 'name': Rename_Task, 'duration_secs': 0.417844} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.820903] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1499.821085] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d3d512fa-8f31-451e-9efd-14ac78a5d33e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.829553] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1499.829553] env[62619]: value = "task-1777512" [ 1499.829553] env[62619]: _type = "Task" [ 1499.829553] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.844079] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777512, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.866324] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60574cfc-e01a-4a46-91db-7e0bf50d0e2d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.876834] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0746dec0-6397-4c60-bab4-73e4425d052a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.909898] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47ff2fb-d0a5-4267-8b32-737639672a9e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.919668] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b00050e-abd9-4994-8495-2ba2c41d58a1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.938334] env[62619]: DEBUG nova.compute.provider_tree [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1499.953936] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "0a80942c-eb86-480b-ab7b-33112dd90d28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.956020] env[62619]: DEBUG oslo_concurrency.lockutils [None 
req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "0a80942c-eb86-480b-ab7b-33112dd90d28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.043957] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777504, 'name': Destroy_Task, 'duration_secs': 0.829039} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.045033] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Destroyed the VM [ 1500.045501] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1500.046234] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1d045220-09bb-480e-b9d0-cf46cf51fe68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.055676] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1500.055676] env[62619]: value = "task-1777513" [ 1500.055676] env[62619]: _type = "Task" [ 1500.055676] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.068951] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777513, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.155257] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777503, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.202992] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777511, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.348906] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777512, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.449507] env[62619]: DEBUG nova.scheduler.client.report [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1500.541259] env[62619]: DEBUG nova.network.neutron [req-203f0a21-e5c1-48d1-a277-254050f2d26f req-5836641a-0645-4f2a-a1e8-f1088d00d46a service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updated VIF entry in instance network info cache for port 7df13a55-7d53-41b9-9489-591516bda30c. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1500.543761] env[62619]: DEBUG nova.network.neutron [req-203f0a21-e5c1-48d1-a277-254050f2d26f req-5836641a-0645-4f2a-a1e8-f1088d00d46a service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance_info_cache with network_info: [{"id": "7df13a55-7d53-41b9-9489-591516bda30c", "address": "fa:16:3e:aa:15:ba", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df13a55-7d", "ovs_interfaceid": "7df13a55-7d53-41b9-9489-591516bda30c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.569566] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777513, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.607574] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "1f86b805-0fde-4bda-9a94-d440a670e23c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.607574] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "1f86b805-0fde-4bda-9a94-d440a670e23c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.657071] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777503, 'name': CloneVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.706977] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777511, 'name': ReconfigVM_Task, 'duration_secs': 0.802113} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.706977] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 7217d898-54ee-46ed-88fa-959c38e988e7/7217d898-54ee-46ed-88fa-959c38e988e7.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1500.707646] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-30960a55-39af-409c-be0c-6013fdd2c8b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.715764] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1500.715764] env[62619]: value = "task-1777514" [ 1500.715764] env[62619]: _type = "Task" [ 1500.715764] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.732424] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777514, 'name': Rename_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.844712] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777512, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.960183] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.771s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.960736] env[62619]: DEBUG nova.compute.manager [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1500.964889] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 23.081s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.965205] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.965404] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1500.965583] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.943s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.967333] env[62619]: INFO nova.compute.claims [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1500.971044] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1df2e76-1cc4-465c-b9bc-1dacf0ad1cff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.982140] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1c0759-6298-4685-8dea-0d06b77c1200 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.002065] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46170c41-f3d8-4fbf-8f60-96f0d17ca047 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.012019] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c49df50-be6c-43e5-bd6b-f34f5ea0dd5e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.049096] env[62619]: DEBUG oslo_concurrency.lockutils [req-203f0a21-e5c1-48d1-a277-254050f2d26f req-5836641a-0645-4f2a-a1e8-f1088d00d46a service nova] Releasing lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.049645] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179207MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1501.051048] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.070405] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777513, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.159295] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777503, 'name': CloneVM_Task, 'duration_secs': 2.047752} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.159632] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Created linked-clone VM from snapshot [ 1501.160461] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd20f99-face-489d-a354-43709572add3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.169278] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Uploading image 6a736652-de14-4c3d-90aa-80ab551efabe {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1501.184898] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1501.185216] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-adab0169-e8d2-4b9c-824c-b1124ab269ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.192641] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1501.192641] env[62619]: value = "task-1777515" [ 1501.192641] env[62619]: _type = "Task" [ 1501.192641] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.202754] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777515, 'name': Destroy_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.227183] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777514, 'name': Rename_Task, 'duration_secs': 0.336691} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.227183] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1501.227183] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35ea28c4-04bd-48d1-b5d6-0f6df33df31a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.237260] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1501.237260] env[62619]: value = "task-1777516" [ 1501.237260] env[62619]: _type = "Task" [ 1501.237260] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.244515] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777516, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.348775] env[62619]: DEBUG oslo_vmware.api [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777512, 'name': PowerOnVM_Task, 'duration_secs': 1.064811} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.351933] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1501.351933] env[62619]: INFO nova.compute.manager [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Took 10.43 seconds to spawn the instance on the hypervisor. 
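
The recurring "Task: {'id': task-..., 'name': ...} progress is N%." and "completed successfully." entries in this log come from oslo.vmware's task poller (wait_for_task at oslo_vmware/api.py:397 and _poll_task at api.py:434/444, per the source references above). Below is a minimal, self-contained sketch of that polling pattern, for illustration only; it is not the oslo.vmware implementation, and fetch_task_state, the poll interval, and the timeout are hypothetical stand-ins.

    # Illustrative sketch of the poll loop behind the "progress is N%" /
    # "completed successfully" entries above. Not oslo.vmware code:
    # fetch_task_state is a hypothetical callable standing in for a
    # vCenter TaskInfo lookup.
    import time
    from typing import Callable, Tuple


    def wait_for_vcenter_task(fetch_task_state: Callable[[], Tuple[str, int]],
                              poll_interval: float = 0.5,
                              timeout: float = 300.0) -> None:
        """Poll a task until it finishes, reporting progress as it goes."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = fetch_task_state()
            if state == "success":
                print("Task completed successfully.")
                return
            if state == "error":
                raise RuntimeError("Task failed")
            print(f"Task progress is {progress}%.")
            time.sleep(poll_interval)
        raise TimeoutError("Task did not complete within the timeout")


    if __name__ == "__main__":
        # Fake task that reports 0%, then 66%, then succeeds, mirroring the
        # PowerOnVM_Task entries in this log.
        states = iter([("running", 0), ("running", 66), ("success", 100)])
        wait_for_vcenter_task(lambda: next(states), poll_interval=0.01)
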
[ 1501.351933] env[62619]: DEBUG nova.compute.manager [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1501.351933] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a5da7c-909f-4366-9bc5-f42b1c6d7140 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.472330] env[62619]: DEBUG nova.compute.utils [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1501.473886] env[62619]: DEBUG nova.compute.manager [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Not allocating networking since 'none' was specified. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1501.567686] env[62619]: DEBUG oslo_vmware.api [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777513, 'name': RemoveSnapshot_Task, 'duration_secs': 1.332822} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.568053] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1501.568275] env[62619]: INFO nova.compute.manager [None req-c20e4bde-79ee-490b-9f73-0a75a61a8472 tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Took 20.54 seconds to snapshot the instance on the hypervisor. [ 1501.702483] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777515, 'name': Destroy_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.746927] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777516, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.876569] env[62619]: INFO nova.compute.manager [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Took 39.96 seconds to build instance. 
[ 1501.975877] env[62619]: DEBUG nova.compute.manager [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1502.206886] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777515, 'name': Destroy_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.248842] env[62619]: DEBUG oslo_vmware.api [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777516, 'name': PowerOnVM_Task, 'duration_secs': 0.698917} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.250611] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1502.250756] env[62619]: INFO nova.compute.manager [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Took 6.47 seconds to spawn the instance on the hypervisor. 
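
The lock lifecycle entries throughout this log (Acquiring lock "..." by "...", acquired ... :: waited N s, and "released" ... :: held N s, all reported from the inner wrapper in oslo_concurrency/lockutils.py) come from oslo.concurrency's lockutils, which Nova wraps around critical sections such as the resource tracker's instance_claim. A minimal usage sketch follows, assuming oslo.concurrency is installed; the lock name, function, and UUID below are illustrative, not taken from Nova's source.

    # Minimal sketch of oslo_concurrency.lockutils.synchronized, the decorator
    # behind the Acquiring/acquired/released lock entries in this log. With the
    # default in-process lock, concurrent callers in the same process serialize
    # on the named lock; enabling DEBUG logging surfaces the waited/held timings.
    import logging

    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)


    @lockutils.synchronized("compute_resources")
    def claim_resources(instance_uuid: str) -> None:
        # Critical section: only one caller at a time, much like
        # ResourceTracker.instance_claim in the entries above.
        print(f"claiming resources for {instance_uuid}")


    if __name__ == "__main__":
        claim_resources("a802534f-1766-4ea9-9188-803ef197d775")
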
[ 1502.251036] env[62619]: DEBUG nova.compute.manager [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1502.252817] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c58fb9-f27d-41dd-b3a2-43b05b7e2ef6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.378588] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793ba671-60e6-44fc-a368-9b7f39edfe9e tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.295s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.428430] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d171c100-04f1-4aa8-8df2-490999802755 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.440075] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33ac369-e8d1-498e-920f-178ce84dcd57 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.487022] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce279b2-9137-425d-b731-b86fc16d46b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.493558] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95d5ffa-224e-4711-a473-d7965cf8b37d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.510692] env[62619]: DEBUG nova.compute.provider_tree [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1502.705489] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777515, 'name': Destroy_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.777482] env[62619]: INFO nova.compute.manager [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Took 40.08 seconds to build instance. [ 1502.881565] env[62619]: DEBUG nova.compute.manager [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1502.989373] env[62619]: DEBUG nova.compute.manager [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1503.013170] env[62619]: DEBUG nova.virt.hardware [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1503.013441] env[62619]: DEBUG nova.virt.hardware [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1503.013594] env[62619]: DEBUG nova.virt.hardware [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1503.013772] env[62619]: DEBUG nova.virt.hardware [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1503.013918] env[62619]: DEBUG nova.virt.hardware [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1503.014100] env[62619]: DEBUG nova.virt.hardware [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1503.014318] env[62619]: DEBUG nova.virt.hardware [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1503.014476] env[62619]: DEBUG nova.virt.hardware [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1503.014637] env[62619]: DEBUG nova.virt.hardware [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1503.015128] env[62619]: DEBUG nova.virt.hardware [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1503.015128] env[62619]: DEBUG nova.virt.hardware [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1503.018210] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3e9c8a-1764-41ed-891f-3a00b57443cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.027882] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9041fa1-5064-4c11-9834-fa9a95ff3c7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.042281] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1503.048192] env[62619]: DEBUG oslo.service.loopingcall [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1503.049185] env[62619]: ERROR nova.scheduler.client.report [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [req-6b566b47-5410-4477-800f-1ae29d0480dc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6b566b47-5410-4477-800f-1ae29d0480dc"}]} [ 1503.050066] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a802534f-1766-4ea9-9188-803ef197d775] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1503.052360] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-215428ad-df69-4f09-b2cd-7c99dc8a3816 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.067717] env[62619]: DEBUG nova.scheduler.client.report [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1503.072026] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1503.072026] env[62619]: value = "task-1777518" [ 1503.072026] env[62619]: _type = "Task" [ 1503.072026] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.082854] env[62619]: DEBUG nova.scheduler.client.report [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1503.083089] env[62619]: DEBUG nova.compute.provider_tree [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1503.088154] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777518, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.096013] env[62619]: DEBUG nova.scheduler.client.report [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1503.119571] env[62619]: DEBUG nova.scheduler.client.report [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1503.215631] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777515, 'name': Destroy_Task, 'duration_secs': 1.566093} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.215927] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Destroyed the VM [ 1503.216298] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1503.217133] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-512bdfd1-2304-40f6-aa56-fbd538013241 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.226486] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1503.226486] env[62619]: value = "task-1777519" [ 1503.226486] env[62619]: _type = "Task" [ 1503.226486] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.238633] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777519, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.246574] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Acquiring lock "fab3d689-9e30-4afd-b0cc-49c6d2870c50" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.247510] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Lock "fab3d689-9e30-4afd-b0cc-49c6d2870c50" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.279180] env[62619]: DEBUG oslo_concurrency.lockutils [None req-110b8d67-fd6b-48fc-b6f4-fdbb5555f797 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "7217d898-54ee-46ed-88fa-959c38e988e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.662s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.411937] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.584571] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777518, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.629379] env[62619]: INFO nova.compute.manager [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Rebuilding instance [ 1503.648236] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15e4222-01ee-45e3-85de-2d83ecdf031d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.658952] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ad6dc7-f503-4c02-8fc8-0c9b76963d28 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.697331] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a6e769-e61d-48f5-9dcc-2dbbe2d142f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.704338] env[62619]: DEBUG nova.compute.manager [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1503.704338] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-511cb688-ee81-49b7-81a7-5274c12b2670 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.709777] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af78282-a4d4-4703-869f-a8d5caaba0fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.728515] env[62619]: DEBUG nova.compute.provider_tree [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1503.744714] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777519, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.782712] env[62619]: DEBUG nova.compute.manager [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1504.083388] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777518, 'name': CreateVM_Task, 'duration_secs': 0.536482} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.083886] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a802534f-1766-4ea9-9188-803ef197d775] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1504.084102] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1504.084208] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.084496] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1504.084742] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8281890-0c75-43ca-bbc5-74bac8df5a5d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.090373] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1504.090373] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529d91dd-eff1-a5bc-9617-6f94128c5fe2" [ 1504.090373] env[62619]: _type = "Task" [ 1504.090373] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.099463] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529d91dd-eff1-a5bc-9617-6f94128c5fe2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.242751] env[62619]: DEBUG nova.scheduler.client.report [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1504.245130] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777519, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.307155] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.601517] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529d91dd-eff1-a5bc-9617-6f94128c5fe2, 'name': SearchDatastore_Task, 'duration_secs': 0.018471} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.601767] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1504.602029] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1504.602291] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1504.602429] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.602605] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1504.602877] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a486fd77-47f4-4aee-b7e6-1ecb14d42f1f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.612657] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1504.612826] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1504.613607] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61f66cfe-d5f8-4612-8579-64ecb5c1789d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.619292] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1504.619292] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d162f0-9013-a7e2-a71c-0837fed91e16" [ 1504.619292] env[62619]: _type = "Task" [ 1504.619292] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.627293] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d162f0-9013-a7e2-a71c-0837fed91e16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.725728] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1504.726067] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f80dfa9-4db2-4392-b177-c1e68f3d2448 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.735267] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1504.735267] env[62619]: value = "task-1777521" [ 1504.735267] env[62619]: _type = "Task" [ 1504.735267] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.741255] env[62619]: DEBUG oslo_vmware.api [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777519, 'name': RemoveSnapshot_Task, 'duration_secs': 1.183691} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.743052] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1504.749018] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.781s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.749018] env[62619]: DEBUG nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1504.751443] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.401s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.751783] env[62619]: DEBUG nova.objects.instance [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lazy-loading 'resources' on Instance uuid 94c9a119-5c04-4550-b55d-a4a2985385d3 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1504.759489] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777521, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.859862] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "80363e16-5dd2-42ad-9ead-25b121d62211" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.860165] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "80363e16-5dd2-42ad-9ead-25b121d62211" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.860383] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "80363e16-5dd2-42ad-9ead-25b121d62211-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.860559] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "80363e16-5dd2-42ad-9ead-25b121d62211-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.860727] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "80363e16-5dd2-42ad-9ead-25b121d62211-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.863273] env[62619]: INFO nova.compute.manager [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Terminating instance [ 1505.130166] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d162f0-9013-a7e2-a71c-0837fed91e16, 'name': SearchDatastore_Task, 'duration_secs': 0.015883} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.131234] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d932e322-9c39-4939-8f3f-dc2d3dbc7f28 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.137467] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1505.137467] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523036b9-2444-d4e9-b061-864ec1ea7ee9" [ 1505.137467] env[62619]: _type = "Task" [ 1505.137467] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.146129] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523036b9-2444-d4e9-b061-864ec1ea7ee9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.250288] env[62619]: WARNING nova.compute.manager [None req-454f40ed-62ce-48da-a29e-2c59fe78bb7e tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Image not found during snapshot: nova.exception.ImageNotFound: Image 6a736652-de14-4c3d-90aa-80ab551efabe could not be found. [ 1505.251452] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777521, 'name': PowerOffVM_Task, 'duration_secs': 0.228055} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.252691] env[62619]: DEBUG nova.compute.utils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1505.254162] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1505.254162] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1505.254674] env[62619]: DEBUG nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1505.254777] env[62619]: DEBUG nova.network.neutron [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1505.259819] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb2772e-d9ea-48ac-9da6-738746bc3836 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.270149] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1505.270333] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bae492aa-579b-44ec-9659-785f11208032 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.316428] env[62619]: DEBUG nova.policy [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1cf40e9c8cc34d578bb40763740eb5cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbc6315c41fe451a94e28d68cab87b2e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1505.368444] env[62619]: DEBUG nova.compute.manager [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1505.368668] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1505.369833] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4038f5d-2994-4f19-8658-684c686898b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.378528] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1505.381059] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c98ecda-c9d0-41c5-83ac-e299eb92ad60 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.389551] env[62619]: DEBUG oslo_vmware.api [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1505.389551] env[62619]: value = "task-1777523" [ 1505.389551] env[62619]: _type = "Task" [ 1505.389551] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.403169] env[62619]: DEBUG oslo_vmware.api [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777523, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.486267] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1505.486267] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1505.487490] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleting the datastore file [datastore1] 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1505.490869] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94e2c1c6-14fd-4bcc-ac65-852d53f68a74 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.500276] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1505.500276] env[62619]: value = "task-1777524" [ 1505.500276] env[62619]: _type = "Task" [ 1505.500276] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.513364] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777524, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.651337] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523036b9-2444-d4e9-b061-864ec1ea7ee9, 'name': SearchDatastore_Task, 'duration_secs': 0.010699} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.654417] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.654722] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a802534f-1766-4ea9-9188-803ef197d775/a802534f-1766-4ea9-9188-803ef197d775.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1505.655301] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3afb7396-7408-4aba-afd3-3ef54ff9098b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.674997] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1505.674997] env[62619]: value = "task-1777525" [ 1505.674997] env[62619]: _type = "Task" [ 1505.674997] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.687847] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777525, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.699285] env[62619]: DEBUG nova.network.neutron [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Successfully created port: b96dbc91-29c9-4dc7-ab5b-6706e585d2d5 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1505.757860] env[62619]: DEBUG nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1505.766120] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1c982b-cf03-4204-9400-3d9dfaa0e233 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.776587] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276e37d4-3b74-4fb0-b2f7-a4dea9d4c376 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.821190] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a607ee3-99b7-44ce-a24d-f2df6fdc30a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.829808] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9240a43c-2d42-491b-b15f-cbcad8f1e38a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.846256] env[62619]: DEBUG nova.compute.provider_tree [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1505.900682] env[62619]: DEBUG oslo_vmware.api [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777523, 'name': PowerOffVM_Task, 'duration_secs': 0.317465} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.900982] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1505.901175] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1505.901436] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e41209cb-6165-4aa6-bb86-1fe6660ea1c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.011822] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777524, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147287} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.012132] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1506.012321] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1506.012486] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1506.023832] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1506.024085] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1506.024252] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Deleting 
the datastore file [datastore1] 80363e16-5dd2-42ad-9ead-25b121d62211 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1506.024528] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1a57809-5d67-4337-b783-8410b8f4cab6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.034025] env[62619]: DEBUG oslo_vmware.api [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1506.034025] env[62619]: value = "task-1777527" [ 1506.034025] env[62619]: _type = "Task" [ 1506.034025] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.045922] env[62619]: DEBUG oslo_vmware.api [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777527, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.190015] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777525, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.379240] env[62619]: DEBUG nova.scheduler.client.report [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 67 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1506.379392] env[62619]: DEBUG nova.compute.provider_tree [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 67 to 68 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1506.379494] env[62619]: DEBUG nova.compute.provider_tree [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1506.544221] env[62619]: DEBUG oslo_vmware.api [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777527, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.411049} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.544500] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1506.544710] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1506.544895] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1506.545103] env[62619]: INFO nova.compute.manager [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1506.545428] env[62619]: DEBUG oslo.service.loopingcall [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1506.545646] env[62619]: DEBUG nova.compute.manager [-] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1506.545747] env[62619]: DEBUG nova.network.neutron [-] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1506.688509] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777525, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.578058} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.688509] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a802534f-1766-4ea9-9188-803ef197d775/a802534f-1766-4ea9-9188-803ef197d775.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1506.688663] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1506.688875] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d451dec3-ac76-4139-a301-efac8961d349 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.697285] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1506.697285] env[62619]: value = "task-1777529" [ 1506.697285] env[62619]: _type = "Task" [ 1506.697285] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.702692] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "060427a2-e724-4c51-879e-675154ae5df2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.702948] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "060427a2-e724-4c51-879e-675154ae5df2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.703199] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "060427a2-e724-4c51-879e-675154ae5df2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.703421] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "060427a2-e724-4c51-879e-675154ae5df2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.703610] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "060427a2-e724-4c51-879e-675154ae5df2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.708226] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777529, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.708737] env[62619]: INFO nova.compute.manager [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Terminating instance [ 1506.775657] env[62619]: DEBUG nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1506.806359] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1506.806519] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1506.806674] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1506.806851] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Flavor pref 0:0:0 {{(pid=62619) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1506.806993] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1506.807153] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1506.807361] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1506.807519] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1506.807679] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1506.807836] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1506.808019] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1506.808903] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0f18a3-e6f7-457d-aeb9-aabea46ada9a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.822075] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87756730-5d56-4d6e-b53c-22bc47812cb1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.848422] env[62619]: DEBUG nova.compute.manager [req-1f87d2e3-a755-4e67-bc55-01addb149a41 req-c3bd0110-d480-4009-b38e-65bda63ee98d service nova] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Received event network-vif-deleted-a0692e57-a205-4f22-b960-7d48b202c513 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1506.848669] env[62619]: INFO nova.compute.manager [req-1f87d2e3-a755-4e67-bc55-01addb149a41 
req-c3bd0110-d480-4009-b38e-65bda63ee98d service nova] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Neutron deleted interface a0692e57-a205-4f22-b960-7d48b202c513; detaching it from the instance and deleting it from the info cache [ 1506.848837] env[62619]: DEBUG nova.network.neutron [req-1f87d2e3-a755-4e67-bc55-01addb149a41 req-c3bd0110-d480-4009-b38e-65bda63ee98d service nova] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1506.884832] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.133s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.888044] env[62619]: DEBUG oslo_concurrency.lockutils [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.518s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.888143] env[62619]: DEBUG nova.objects.instance [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lazy-loading 'resources' on Instance uuid 2a41be15-efaf-4e78-a278-2711cb11e98f {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1506.908207] env[62619]: INFO nova.scheduler.client.report [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Deleted allocations for instance 94c9a119-5c04-4550-b55d-a4a2985385d3 [ 1507.043598] env[62619]: DEBUG nova.virt.hardware [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1507.043896] env[62619]: DEBUG nova.virt.hardware [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1507.044098] env[62619]: DEBUG nova.virt.hardware [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 
tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1507.044325] env[62619]: DEBUG nova.virt.hardware [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1507.044551] env[62619]: DEBUG nova.virt.hardware [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1507.044800] env[62619]: DEBUG nova.virt.hardware [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1507.045056] env[62619]: DEBUG nova.virt.hardware [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1507.045228] env[62619]: DEBUG nova.virt.hardware [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1507.045422] env[62619]: DEBUG nova.virt.hardware [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1507.045591] env[62619]: DEBUG nova.virt.hardware [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1507.045789] env[62619]: DEBUG nova.virt.hardware [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1507.046698] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f607d1-bb5e-4b42-acb3-ec157815751a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.055363] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d51bbbf-9a1d-470d-bf31-0f32835e7a8d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.071040] 
env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:c5:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2cb095b0-1732-485d-a7ea-c306699fa81f', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1507.078079] env[62619]: DEBUG oslo.service.loopingcall [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1507.078309] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1507.078512] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c8168be-5a88-4b72-9518-200235c317ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.098319] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1507.098319] env[62619]: value = "task-1777530" [ 1507.098319] env[62619]: _type = "Task" [ 1507.098319] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.889129] env[62619]: DEBUG nova.network.neutron [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Successfully updated port: b96dbc91-29c9-4dc7-ab5b-6706e585d2d5 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1507.890711] env[62619]: DEBUG nova.compute.manager [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1507.890940] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1507.892096] env[62619]: DEBUG nova.network.neutron [-] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1507.899280] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa32f7b7-3766-48d3-9bdc-d310f32398ed tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "94c9a119-5c04-4550-b55d-a4a2985385d3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.453s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.902105] env[62619]: DEBUG nova.compute.manager [req-98a31890-86f6-4e97-bdf9-4eebc00b2e2f req-7cbf4bdb-aef7-4b39-99c3-1c2e8ca199f7 service nova] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Received event network-vif-plugged-b96dbc91-29c9-4dc7-ab5b-6706e585d2d5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1507.902105] env[62619]: DEBUG oslo_concurrency.lockutils [req-98a31890-86f6-4e97-bdf9-4eebc00b2e2f req-7cbf4bdb-aef7-4b39-99c3-1c2e8ca199f7 service nova] Acquiring lock "ca5f5f6b-5303-4af4-adaa-e4aac72a90f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1507.902105] env[62619]: DEBUG oslo_concurrency.lockutils [req-98a31890-86f6-4e97-bdf9-4eebc00b2e2f req-7cbf4bdb-aef7-4b39-99c3-1c2e8ca199f7 service nova] Lock "ca5f5f6b-5303-4af4-adaa-e4aac72a90f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.902105] env[62619]: DEBUG oslo_concurrency.lockutils [req-98a31890-86f6-4e97-bdf9-4eebc00b2e2f req-7cbf4bdb-aef7-4b39-99c3-1c2e8ca199f7 service nova] Lock "ca5f5f6b-5303-4af4-adaa-e4aac72a90f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.902363] env[62619]: DEBUG nova.compute.manager [req-98a31890-86f6-4e97-bdf9-4eebc00b2e2f req-7cbf4bdb-aef7-4b39-99c3-1c2e8ca199f7 service nova] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] No waiting events found dispatching network-vif-plugged-b96dbc91-29c9-4dc7-ab5b-6706e585d2d5 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1507.902400] env[62619]: WARNING nova.compute.manager [req-98a31890-86f6-4e97-bdf9-4eebc00b2e2f req-7cbf4bdb-aef7-4b39-99c3-1c2e8ca199f7 service nova] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Received unexpected event network-vif-plugged-b96dbc91-29c9-4dc7-ab5b-6706e585d2d5 for instance with vm_state building and task_state spawning. 
[ 1507.908773] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25812f45-a772-4ef9-864e-453722172e8e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.912029] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e114fee1-746a-4ea7-9f94-52cc88bb45da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.929222] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777529, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075291} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.929428] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777530, 'name': CreateVM_Task, 'duration_secs': 0.73745} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.929659] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1507.931599] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1507.931770] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1507.931980] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef5abeaa-f2a9-413f-b702-bd0d678b03ce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.937347] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ffcebb-f0b7-4ac2-abb6-60ced849a3cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.940785] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1507.941078] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1507.941820] env[62619]: DEBUG oslo_concurrency.lockutils 
[None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1507.943549] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d564f8c7-ef92-4cb5-b996-3d9aa00fee69 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.948821] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3bc372d-e846-4b73-a157-a50b019d6d2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.971547] env[62619]: DEBUG oslo_vmware.api [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1507.971547] env[62619]: value = "task-1777531" [ 1507.971547] env[62619]: _type = "Task" [ 1507.971547] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.981269] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] a802534f-1766-4ea9-9188-803ef197d775/a802534f-1766-4ea9-9188-803ef197d775.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1507.983988] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ae04829-e2b1-445d-8cef-f429484aea4d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.998259] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1507.998259] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526c96a0-58e6-cb82-350c-ac6b863f19cb" [ 1507.998259] env[62619]: _type = "Task" [ 1507.998259] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.021630] env[62619]: DEBUG nova.compute.manager [req-1f87d2e3-a755-4e67-bc55-01addb149a41 req-c3bd0110-d480-4009-b38e-65bda63ee98d service nova] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Detach interface failed, port_id=a0692e57-a205-4f22-b960-7d48b202c513, reason: Instance 80363e16-5dd2-42ad-9ead-25b121d62211 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1508.036769] env[62619]: DEBUG oslo_vmware.api [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777531, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.043767] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526c96a0-58e6-cb82-350c-ac6b863f19cb, 'name': SearchDatastore_Task, 'duration_secs': 0.024753} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.044157] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1508.044157] env[62619]: value = "task-1777532" [ 1508.044157] env[62619]: _type = "Task" [ 1508.044157] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.048480] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1508.048894] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1508.049286] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.049557] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.049915] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1508.050761] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d0f24b2b-df7e-4a5d-bb78-180adfa4008d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.070833] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: 
{'id': task-1777532, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.072664] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1508.072927] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1508.074028] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64a12a11-85d8-411f-b191-0c6629ede2cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.086237] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1508.086237] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52308928-fd3f-5097-0c6f-976a46447cc6" [ 1508.086237] env[62619]: _type = "Task" [ 1508.086237] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.100792] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52308928-fd3f-5097-0c6f-976a46447cc6, 'name': SearchDatastore_Task, 'duration_secs': 0.010755} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.101653] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9773c4ff-8151-4351-9db6-d3cc73a22095 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.112329] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1508.112329] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52840d45-3b58-0f14-37c6-548c491209b5" [ 1508.112329] env[62619]: _type = "Task" [ 1508.112329] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.123189] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52840d45-3b58-0f14-37c6-548c491209b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.396269] env[62619]: INFO nova.compute.manager [-] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Took 1.85 seconds to deallocate network for instance. [ 1508.405995] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "refresh_cache-ca5f5f6b-5303-4af4-adaa-e4aac72a90f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.406246] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired lock "refresh_cache-ca5f5f6b-5303-4af4-adaa-e4aac72a90f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.406439] env[62619]: DEBUG nova.network.neutron [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1508.496233] env[62619]: DEBUG oslo_vmware.api [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777531, 'name': PowerOffVM_Task, 'duration_secs': 0.211506} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.499115] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1508.499321] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1508.499780] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52336ca0-1224-4d62-a28e-979d289ec2b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.540848] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d09e801-7141-457d-9b98-00654ee8099d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.549833] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ee8c69-1799-4b0b-87e2-e76b057a535f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.562505] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 
tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777532, 'name': ReconfigVM_Task, 'duration_secs': 0.436837} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.588483] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Reconfigured VM instance instance-0000002b to attach disk [datastore1] a802534f-1766-4ea9-9188-803ef197d775/a802534f-1766-4ea9-9188-803ef197d775.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1508.589276] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dff14bf9-b765-4ec4-84b4-36021e2ec54d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.591734] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4acd650e-a201-42e8-b3f4-96def1794dcd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.596567] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1508.596781] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1508.596948] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Deleting the datastore file [datastore1] 060427a2-e724-4c51-879e-675154ae5df2 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1508.597218] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43afffcf-e9fd-4a8d-a560-3ced344a4563 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.606147] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0aaa77-437d-4395-aade-29fc4971e103 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.609299] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1508.609299] env[62619]: value = "task-1777534" [ 1508.609299] env[62619]: _type = "Task" [ 1508.609299] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.610762] env[62619]: DEBUG oslo_vmware.api [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1508.610762] env[62619]: value = "task-1777535" [ 1508.610762] env[62619]: _type = "Task" [ 1508.610762] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.629280] env[62619]: DEBUG nova.compute.provider_tree [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1508.634994] env[62619]: DEBUG oslo_concurrency.lockutils [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "4ee81568-ad9a-4ded-b6fe-15503d85968e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.635446] env[62619]: DEBUG oslo_concurrency.lockutils [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "4ee81568-ad9a-4ded-b6fe-15503d85968e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.635703] env[62619]: DEBUG oslo_concurrency.lockutils [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "4ee81568-ad9a-4ded-b6fe-15503d85968e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.635980] env[62619]: DEBUG oslo_concurrency.lockutils [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "4ee81568-ad9a-4ded-b6fe-15503d85968e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.636243] env[62619]: DEBUG oslo_concurrency.lockutils [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "4ee81568-ad9a-4ded-b6fe-15503d85968e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.638046] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777534, 'name': Rename_Task} 
progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.645377] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52840d45-3b58-0f14-37c6-548c491209b5, 'name': SearchDatastore_Task, 'duration_secs': 0.016733} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.645562] env[62619]: DEBUG oslo_vmware.api [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777535, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.646591] env[62619]: INFO nova.compute.manager [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Terminating instance [ 1508.647996] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1508.648265] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c/1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1508.649667] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de29e1fe-1513-4d8b-8059-308d1623dba4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.672315] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1508.672315] env[62619]: value = "task-1777536" [ 1508.672315] env[62619]: _type = "Task" [ 1508.672315] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.681835] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777536, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.912397] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.940900] env[62619]: DEBUG nova.network.neutron [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1509.130519] env[62619]: DEBUG oslo_vmware.api [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777535, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.259052} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.130807] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777534, 'name': Rename_Task, 'duration_secs': 0.19466} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.131861] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1509.131861] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1509.131861] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1509.131861] env[62619]: INFO nova.compute.manager [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1509.132544] env[62619]: DEBUG oslo.service.loopingcall [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1509.132544] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1509.132544] env[62619]: DEBUG nova.compute.manager [-] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1509.132544] env[62619]: DEBUG nova.network.neutron [-] [instance: 060427a2-e724-4c51-879e-675154ae5df2] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1509.134762] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6bf045a4-d503-4136-8891-53a7eb31aa1e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.147151] env[62619]: DEBUG nova.scheduler.client.report [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1509.153823] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1509.153823] env[62619]: value = "task-1777538" [ 1509.153823] env[62619]: _type = "Task" [ 1509.153823] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.155116] env[62619]: DEBUG nova.compute.manager [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1509.155629] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1509.156825] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a47e561-b47a-422b-a582-028218d3472f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.165958] env[62619]: DEBUG nova.network.neutron [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Updating instance_info_cache with network_info: [{"id": "b96dbc91-29c9-4dc7-ab5b-6706e585d2d5", "address": "fa:16:3e:1a:55:a7", "network": {"id": "0e501edc-55e3-4ded-9e7c-07e29223ba50", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1446821690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc6315c41fe451a94e28d68cab87b2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb96dbc91-29", "ovs_interfaceid": "b96dbc91-29c9-4dc7-ab5b-6706e585d2d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.178420] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1509.188492] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8d08214-a4ad-4bd5-9b0c-84dbaff840a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.189842] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777538, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.203644] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777536, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.206694] env[62619]: DEBUG oslo_vmware.api [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1509.206694] env[62619]: value = "task-1777539" [ 1509.206694] env[62619]: _type = "Task" [ 1509.206694] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.224667] env[62619]: DEBUG oslo_vmware.api [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777539, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.524677] env[62619]: DEBUG nova.compute.manager [req-0833eeab-3c34-4a1a-af78-d294da8500d2 req-5e49959d-d167-46d6-a4d1-e3da142839da service nova] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Received event network-changed-b96dbc91-29c9-4dc7-ab5b-6706e585d2d5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1509.524828] env[62619]: DEBUG nova.compute.manager [req-0833eeab-3c34-4a1a-af78-d294da8500d2 req-5e49959d-d167-46d6-a4d1-e3da142839da service nova] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Refreshing instance network info cache due to event network-changed-b96dbc91-29c9-4dc7-ab5b-6706e585d2d5. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1509.525040] env[62619]: DEBUG oslo_concurrency.lockutils [req-0833eeab-3c34-4a1a-af78-d294da8500d2 req-5e49959d-d167-46d6-a4d1-e3da142839da service nova] Acquiring lock "refresh_cache-ca5f5f6b-5303-4af4-adaa-e4aac72a90f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1509.600718] env[62619]: DEBUG nova.compute.manager [req-971884ac-8ae9-4291-9546-0a9ddd17482f req-9e84f80b-06c2-45f5-aa39-2d825e899ffb service nova] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Received event network-vif-deleted-4204271d-966d-4e43-9791-80b4443eccc9 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1509.600718] env[62619]: INFO nova.compute.manager [req-971884ac-8ae9-4291-9546-0a9ddd17482f req-9e84f80b-06c2-45f5-aa39-2d825e899ffb service nova] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Neutron deleted interface 4204271d-966d-4e43-9791-80b4443eccc9; detaching it from the instance and deleting it from the info cache [ 1509.602580] env[62619]: DEBUG nova.network.neutron [req-971884ac-8ae9-4291-9546-0a9ddd17482f req-9e84f80b-06c2-45f5-aa39-2d825e899ffb service nova] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.655487] env[62619]: DEBUG oslo_concurrency.lockutils [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.768s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.658186] env[62619]: DEBUG oslo_concurrency.lockutils [None 
req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.586s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.663031] env[62619]: INFO nova.compute.claims [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1509.669404] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Releasing lock "refresh_cache-ca5f5f6b-5303-4af4-adaa-e4aac72a90f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1509.669663] env[62619]: DEBUG nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Instance network_info: |[{"id": "b96dbc91-29c9-4dc7-ab5b-6706e585d2d5", "address": "fa:16:3e:1a:55:a7", "network": {"id": "0e501edc-55e3-4ded-9e7c-07e29223ba50", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1446821690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc6315c41fe451a94e28d68cab87b2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb96dbc91-29", "ovs_interfaceid": "b96dbc91-29c9-4dc7-ab5b-6706e585d2d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1509.669936] env[62619]: DEBUG oslo_concurrency.lockutils [req-0833eeab-3c34-4a1a-af78-d294da8500d2 req-5e49959d-d167-46d6-a4d1-e3da142839da service nova] Acquired lock "refresh_cache-ca5f5f6b-5303-4af4-adaa-e4aac72a90f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1509.670124] env[62619]: DEBUG nova.network.neutron [req-0833eeab-3c34-4a1a-af78-d294da8500d2 req-5e49959d-d167-46d6-a4d1-e3da142839da service nova] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Refreshing network info cache for port b96dbc91-29c9-4dc7-ab5b-6706e585d2d5 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1509.671463] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Instance 
VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:55:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aaf1b231-3660-4453-b4f3-44d825b9a5dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b96dbc91-29c9-4dc7-ab5b-6706e585d2d5', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1509.679524] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Creating folder: Project (dbc6315c41fe451a94e28d68cab87b2e). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1509.686763] env[62619]: INFO nova.scheduler.client.report [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleted allocations for instance 2a41be15-efaf-4e78-a278-2711cb11e98f [ 1509.687842] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ea902f2-dcc9-4a27-ab3d-5d0fbda89cda {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.690479] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777538, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.710027] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777536, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.740961} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.710027] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c/1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1509.710027] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1509.710027] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3571654-c4d1-48c8-8910-c6223af21e1f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.715486] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Created folder: Project (dbc6315c41fe451a94e28d68cab87b2e) in parent group-v368875. 
[ 1509.715675] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Creating folder: Instances. Parent ref: group-v369006. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1509.716248] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-adc87216-c53f-42d1-8105-3b8eb2434b5c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.720989] env[62619]: DEBUG oslo_vmware.api [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777539, 'name': PowerOffVM_Task, 'duration_secs': 0.47306} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.722244] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1509.722403] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1509.722702] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1509.722702] env[62619]: value = "task-1777541" [ 1509.722702] env[62619]: _type = "Task" [ 1509.722702] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.722876] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2de08a2f-7891-40d2-b1f3-b77da116b450 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.730329] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Created folder: Instances in parent group-v369006. [ 1509.730611] env[62619]: DEBUG oslo.service.loopingcall [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1509.734885] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1509.735864] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12f702cd-bccc-4072-a6d5-e26b17f1e271 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.757622] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777541, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.766041] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1509.766041] env[62619]: value = "task-1777544" [ 1509.766041] env[62619]: _type = "Task" [ 1509.766041] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.776678] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777544, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.831608] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1509.831826] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1509.832015] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Deleting the datastore file [datastore1] 4ee81568-ad9a-4ded-b6fe-15503d85968e {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1509.832302] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68688857-efba-45ac-862a-4188644f474a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.840248] env[62619]: DEBUG oslo_vmware.api [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for the task: (returnval){ [ 1509.840248] env[62619]: value = "task-1777545" [ 1509.840248] env[62619]: _type = "Task" [ 1509.840248] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.850786] env[62619]: DEBUG oslo_vmware.api [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777545, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.952950] env[62619]: DEBUG nova.network.neutron [req-0833eeab-3c34-4a1a-af78-d294da8500d2 req-5e49959d-d167-46d6-a4d1-e3da142839da service nova] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Updated VIF entry in instance network info cache for port b96dbc91-29c9-4dc7-ab5b-6706e585d2d5. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1509.953678] env[62619]: DEBUG nova.network.neutron [req-0833eeab-3c34-4a1a-af78-d294da8500d2 req-5e49959d-d167-46d6-a4d1-e3da142839da service nova] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Updating instance_info_cache with network_info: [{"id": "b96dbc91-29c9-4dc7-ab5b-6706e585d2d5", "address": "fa:16:3e:1a:55:a7", "network": {"id": "0e501edc-55e3-4ded-9e7c-07e29223ba50", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1446821690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc6315c41fe451a94e28d68cab87b2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb96dbc91-29", "ovs_interfaceid": "b96dbc91-29c9-4dc7-ab5b-6706e585d2d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.990585] env[62619]: DEBUG nova.network.neutron [-] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1510.104609] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cef4d85d-67e0-47fe-b504-9b53c764f37d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.114713] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d082fcd-50d1-4aa8-af80-ff60f9ca8e75 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.154963] env[62619]: DEBUG nova.compute.manager [req-971884ac-8ae9-4291-9546-0a9ddd17482f req-9e84f80b-06c2-45f5-aa39-2d825e899ffb service nova] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Detach interface failed, port_id=4204271d-966d-4e43-9791-80b4443eccc9, reason: Instance 060427a2-e724-4c51-879e-675154ae5df2 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1510.177965] env[62619]: DEBUG oslo_vmware.api [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777538, 'name': PowerOnVM_Task, 'duration_secs': 0.917802} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.179242] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1510.179405] env[62619]: INFO nova.compute.manager [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Took 7.19 seconds to spawn the instance on the hypervisor. [ 1510.179581] env[62619]: DEBUG nova.compute.manager [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1510.180375] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062d89e9-f3f0-45cd-813c-143a2e6312c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.201960] env[62619]: DEBUG oslo_concurrency.lockutils [None req-099cf636-3c5d-4862-87ca-c21add83c222 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "2a41be15-efaf-4e78-a278-2711cb11e98f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.608s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1510.240889] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777541, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08065} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.241378] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1510.242637] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a168694b-8e37-4c45-b3fc-07b1187716eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.271181] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c/1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1510.271418] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f941c1d2-0775-43bf-af29-9c6291093f7e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.298046] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777544, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.299820] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1510.299820] env[62619]: value = "task-1777546" [ 1510.299820] env[62619]: _type = "Task" [ 1510.299820] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.308963] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777546, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.350489] env[62619]: DEBUG oslo_vmware.api [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Task: {'id': task-1777545, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.461049} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.350785] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1510.351077] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1510.351195] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1510.351396] env[62619]: INFO nova.compute.manager [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1510.351659] env[62619]: DEBUG oslo.service.loopingcall [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1510.351875] env[62619]: DEBUG nova.compute.manager [-] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1510.351982] env[62619]: DEBUG nova.network.neutron [-] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1510.456823] env[62619]: DEBUG oslo_concurrency.lockutils [req-0833eeab-3c34-4a1a-af78-d294da8500d2 req-5e49959d-d167-46d6-a4d1-e3da142839da service nova] Releasing lock "refresh_cache-ca5f5f6b-5303-4af4-adaa-e4aac72a90f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.495164] env[62619]: INFO nova.compute.manager [-] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Took 1.36 seconds to deallocate network for instance. [ 1510.700827] env[62619]: INFO nova.compute.manager [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Took 44.23 seconds to build instance. [ 1510.787119] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777544, 'name': CreateVM_Task, 'duration_secs': 0.724277} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.787119] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1510.787119] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1510.787354] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.787551] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1510.787798] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1890695c-6d5b-448d-be4b-b4575041a755 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.793855] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1510.793855] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5297f4ba-adc3-4bb4-887b-e8a93ed0c0c1" [ 1510.793855] env[62619]: _type = "Task" [ 1510.793855] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.806617] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5297f4ba-adc3-4bb4-887b-e8a93ed0c0c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.814043] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777546, 'name': ReconfigVM_Task, 'duration_secs': 0.395429} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.814216] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c/1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1510.814978] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-183cf456-5659-46ed-89cd-c5d2f6940520 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.824636] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1510.824636] env[62619]: value = "task-1777548" [ 1510.824636] env[62619]: _type = "Task" [ 1510.824636] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.833759] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777548, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.000101] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.088742] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1c5b8d-d244-4d9b-9f13-2195e06cd4ca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.097493] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63bad90-50b9-445c-9737-89ed172c46f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.101082] env[62619]: DEBUG nova.network.neutron [-] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.130359] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f908d91-e8a8-4f4b-b3f9-539cecb4c601 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.139078] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99d376e-198b-422f-90dc-d5c8c65b85d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.154359] env[62619]: DEBUG 
nova.compute.provider_tree [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1511.203911] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14792ed5-7088-44e2-b4ad-5c690104952b tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "a802534f-1766-4ea9-9188-803ef197d775" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.629s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.304699] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5297f4ba-adc3-4bb4-887b-e8a93ed0c0c1, 'name': SearchDatastore_Task, 'duration_secs': 0.022777} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.305026] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.305252] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1511.305489] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.305732] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.305818] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1511.306070] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-437bd30b-3577-40d1-80bf-cda85e823b69 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.319294] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1511.319473] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1511.320209] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6bbbc6a-f237-4a28-93fa-3d3510a2dcc8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.326248] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1511.326248] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527351a8-939e-9e30-fa93-32685fee2bc5" [ 1511.326248] env[62619]: _type = "Task" [ 1511.326248] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.341042] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777548, 'name': Rename_Task, 'duration_secs': 0.166709} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.341279] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527351a8-939e-9e30-fa93-32685fee2bc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.341524] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1511.341775] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c9fc1d3f-d5ac-4108-9291-434862bf0cf3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.350058] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1511.350058] env[62619]: value = "task-1777549" [ 1511.350058] env[62619]: _type = "Task" [ 1511.350058] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.361024] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777549, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.455445] env[62619]: INFO nova.compute.manager [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Rebuilding instance [ 1511.493820] env[62619]: DEBUG nova.compute.manager [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1511.494819] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147d89be-24d5-46bc-9899-595a9618b986 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.603897] env[62619]: INFO nova.compute.manager [-] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Took 1.25 seconds to deallocate network for instance. [ 1511.657033] env[62619]: DEBUG nova.scheduler.client.report [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1511.665355] env[62619]: DEBUG nova.compute.manager [req-f0baf7e2-cddd-47b7-af3d-c392d32a3dd0 req-a4ae6950-d6d8-4cfe-b9b0-a349d75a0396 service nova] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Received event network-vif-deleted-2de36e51-915e-40de-9915-eb1bca9827ad {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1511.706488] env[62619]: DEBUG nova.compute.manager [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1511.841052] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527351a8-939e-9e30-fa93-32685fee2bc5, 'name': SearchDatastore_Task, 'duration_secs': 0.030844} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.841879] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bf63ca8-df8d-4c53-b26b-5fa322224be2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.848715] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1511.848715] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527e03af-0de8-87e1-e0a6-e3bccaffb2b0" [ 1511.848715] env[62619]: _type = "Task" [ 1511.848715] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.861958] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527e03af-0de8-87e1-e0a6-e3bccaffb2b0, 'name': SearchDatastore_Task, 'duration_secs': 0.011756} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.865355] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.865501] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ca5f5f6b-5303-4af4-adaa-e4aac72a90f8/ca5f5f6b-5303-4af4-adaa-e4aac72a90f8.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1511.865758] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777549, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.865966] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-820ff444-25e2-4f2f-b4dc-6ac61761d343 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.874459] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1511.874459] env[62619]: value = "task-1777550" [ 1511.874459] env[62619]: _type = "Task" [ 1511.874459] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.884206] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777550, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.110761] env[62619]: DEBUG oslo_concurrency.lockutils [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.161786] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.504s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.162268] env[62619]: DEBUG nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1512.164960] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.919s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.166372] env[62619]: INFO nova.compute.claims [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1512.231422] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.364020] env[62619]: DEBUG oslo_vmware.api [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777549, 'name': PowerOnVM_Task, 'duration_secs': 0.576966} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.364381] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1512.364596] env[62619]: DEBUG nova.compute.manager [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1512.365450] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-887c1d5f-c8bf-4ef3-945a-0ebfba17f5d0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.385203] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777550, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.511234] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1512.512122] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed3633e7-c574-4730-b38d-432a1557270d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.519750] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1512.519750] env[62619]: value = "task-1777551" [ 1512.519750] env[62619]: _type = "Task" [ 1512.519750] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.528367] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777551, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.671418] env[62619]: DEBUG nova.compute.utils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1512.672559] env[62619]: DEBUG nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1512.672780] env[62619]: DEBUG nova.network.neutron [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1512.716352] env[62619]: DEBUG nova.policy [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1cf40e9c8cc34d578bb40763740eb5cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbc6315c41fe451a94e28d68cab87b2e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1512.891066] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.898097] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777550, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.580613} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.898417] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ca5f5f6b-5303-4af4-adaa-e4aac72a90f8/ca5f5f6b-5303-4af4-adaa-e4aac72a90f8.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1512.898637] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1512.898894] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba6effb5-252d-4cbd-9dd5-5a87ba7bc621 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.908178] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1512.908178] env[62619]: value = "task-1777553" [ 1512.908178] env[62619]: _type = "Task" [ 1512.908178] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.918597] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777553, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.930035] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Acquiring lock "d4230edc-cfda-4b9f-ab42-2f39c699ff03" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.930035] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Lock "d4230edc-cfda-4b9f-ab42-2f39c699ff03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.930035] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Acquiring lock "d4230edc-cfda-4b9f-ab42-2f39c699ff03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.930221] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Lock "d4230edc-cfda-4b9f-ab42-2f39c699ff03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.930356] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Lock "d4230edc-cfda-4b9f-ab42-2f39c699ff03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.932526] env[62619]: INFO nova.compute.manager [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Terminating instance [ 1513.022645] env[62619]: DEBUG nova.network.neutron [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Successfully created port: 88e763dc-8978-45a5-a870-d55441fb392a {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1513.038138] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777551, 'name': PowerOffVM_Task, 'duration_secs': 0.134714} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.038138] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1513.038138] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1513.038335] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39b94cf-1d8d-4f1d-9dbd-d88d1ea45116 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.047044] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1513.047044] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2d1cd55-9a45-434e-992e-582bedd230f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.079182] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1513.079182] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1513.079182] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Deleting the datastore file [datastore1] a802534f-1766-4ea9-9188-803ef197d775 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1513.079182] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea662c3d-36c7-4d9a-b5e6-47362dbbff6f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.086018] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1513.086018] env[62619]: value = "task-1777555" [ 1513.086018] env[62619]: _type = "Task" [ 1513.086018] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.097754] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777555, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.176372] env[62619]: DEBUG nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1513.422061] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777553, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.320185} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.422061] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1513.422061] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4287fd8-cd55-4182-9af8-cfd89d8a36ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.439975] env[62619]: DEBUG nova.compute.manager [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1513.440218] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1513.449172] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] ca5f5f6b-5303-4af4-adaa-e4aac72a90f8/ca5f5f6b-5303-4af4-adaa-e4aac72a90f8.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1513.452554] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba77a050-d6e3-4bee-a334-be428613d84b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.455075] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1dd5343-4af0-4f33-8616-dfa5c0ff08f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.473609] env[62619]: DEBUG oslo_concurrency.lockutils [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.473843] env[62619]: DEBUG oslo_concurrency.lockutils [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1513.474088] env[62619]: DEBUG oslo_concurrency.lockutils [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.474296] env[62619]: DEBUG oslo_concurrency.lockutils [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1513.474470] env[62619]: DEBUG oslo_concurrency.lockutils [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] 
Lock "1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1513.476517] env[62619]: INFO nova.compute.manager [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Terminating instance [ 1513.484544] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1513.488763] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1963a28c-f658-48bd-85f0-03f12759672e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.490383] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1513.490383] env[62619]: value = "task-1777556" [ 1513.490383] env[62619]: _type = "Task" [ 1513.490383] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.498396] env[62619]: DEBUG oslo_vmware.api [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Waiting for the task: (returnval){ [ 1513.498396] env[62619]: value = "task-1777557" [ 1513.498396] env[62619]: _type = "Task" [ 1513.498396] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.504600] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777556, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.518776] env[62619]: DEBUG oslo_vmware.api [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777557, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.595684] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777555, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.672725] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31028bc4-79b5-4526-b283-becc9f3a0a32 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.680726] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4dde1e8-169a-4906-b286-84ee6a7d8d05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.717245] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3496cea-4898-4809-adbc-953ced934f57 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.727143] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7af2cc7-7cf3-4398-ac32-cd68118b9385 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.744113] env[62619]: DEBUG nova.compute.provider_tree [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1513.980339] env[62619]: DEBUG nova.compute.manager [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1513.980339] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1513.981167] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8bd553-43ac-4a64-8e9e-8cf702052395 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.989904] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1513.990169] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bab91a21-1993-46dc-916d-1746d3ecaf36 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.998736] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "ae37cae9-c82e-4775-8a8f-6bbf9108b0bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.998974] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "ae37cae9-c82e-4775-8a8f-6bbf9108b0bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.000349] env[62619]: DEBUG oslo_vmware.api [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1514.000349] env[62619]: value = "task-1777558" [ 1514.000349] env[62619]: _type = "Task" [ 1514.000349] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.003632] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777556, 'name': ReconfigVM_Task, 'duration_secs': 0.432735} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.010052] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Reconfigured VM instance instance-0000002c to attach disk [datastore1] ca5f5f6b-5303-4af4-adaa-e4aac72a90f8/ca5f5f6b-5303-4af4-adaa-e4aac72a90f8.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1514.010494] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-742b6bf9-ca15-439e-9379-01d60b05dbf8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.017628] env[62619]: DEBUG oslo_vmware.api [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777558, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.021625] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1514.021625] env[62619]: value = "task-1777559" [ 1514.021625] env[62619]: _type = "Task" [ 1514.021625] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.021875] env[62619]: DEBUG oslo_vmware.api [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777557, 'name': PowerOffVM_Task, 'duration_secs': 0.335187} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.022204] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1514.022373] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1514.025797] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a1a9f710-08c5-40cf-baba-7228a402fcd3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.033492] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777559, 'name': Rename_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.098548] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777555, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.87243} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.098867] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1514.099106] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1514.099681] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1514.168873] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1514.169155] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1514.169344] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Deleting the datastore file [datastore1] d4230edc-cfda-4b9f-ab42-2f39c699ff03 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1514.169670] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b862cc0-9fee-4310-81e1-2e27cb7a85ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.182621] env[62619]: DEBUG oslo_vmware.api [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Waiting for the task: (returnval){ [ 1514.182621] env[62619]: value = "task-1777561" [ 1514.182621] env[62619]: _type = "Task" [ 1514.182621] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.190928] env[62619]: DEBUG nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1514.202878] env[62619]: DEBUG oslo_vmware.api [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777561, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.217236] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1514.217626] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1514.217799] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1514.217996] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1514.218244] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1514.218531] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1514.218786] env[62619]: 
DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1514.218966] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1514.219165] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1514.219336] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1514.219516] env[62619]: DEBUG nova.virt.hardware [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1514.220450] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d00491f-4767-4df4-8a2e-59e1f98997f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.234863] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b439539c-2130-4497-9501-1813ca383649 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.251485] env[62619]: DEBUG nova.scheduler.client.report [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1514.445488] env[62619]: DEBUG nova.compute.manager [req-595e3b84-0cda-4699-9c7e-c05d4dc2b957 req-52aa9794-ae17-4e6e-bf6b-4c05f07c24e0 service nova] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Received event network-vif-plugged-88e763dc-8978-45a5-a870-d55441fb392a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1514.445488] env[62619]: DEBUG oslo_concurrency.lockutils [req-595e3b84-0cda-4699-9c7e-c05d4dc2b957 req-52aa9794-ae17-4e6e-bf6b-4c05f07c24e0 service nova] 
Acquiring lock "78c7a111-d497-4114-b4f4-07319e6e7df2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.445488] env[62619]: DEBUG oslo_concurrency.lockutils [req-595e3b84-0cda-4699-9c7e-c05d4dc2b957 req-52aa9794-ae17-4e6e-bf6b-4c05f07c24e0 service nova] Lock "78c7a111-d497-4114-b4f4-07319e6e7df2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.445488] env[62619]: DEBUG oslo_concurrency.lockutils [req-595e3b84-0cda-4699-9c7e-c05d4dc2b957 req-52aa9794-ae17-4e6e-bf6b-4c05f07c24e0 service nova] Lock "78c7a111-d497-4114-b4f4-07319e6e7df2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.445488] env[62619]: DEBUG nova.compute.manager [req-595e3b84-0cda-4699-9c7e-c05d4dc2b957 req-52aa9794-ae17-4e6e-bf6b-4c05f07c24e0 service nova] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] No waiting events found dispatching network-vif-plugged-88e763dc-8978-45a5-a870-d55441fb392a {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1514.445488] env[62619]: WARNING nova.compute.manager [req-595e3b84-0cda-4699-9c7e-c05d4dc2b957 req-52aa9794-ae17-4e6e-bf6b-4c05f07c24e0 service nova] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Received unexpected event network-vif-plugged-88e763dc-8978-45a5-a870-d55441fb392a for instance with vm_state building and task_state spawning. [ 1514.514931] env[62619]: DEBUG oslo_vmware.api [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777558, 'name': PowerOffVM_Task, 'duration_secs': 0.269239} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.514931] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1514.514931] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1514.514931] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fcd557ee-954b-4486-b879-1a72fd965972 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.536767] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777559, 'name': Rename_Task, 'duration_secs': 0.171112} completed successfully.
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.536767] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1514.536767] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-28b77786-e6a6-4d16-bd32-aee69194bcd1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.545505] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1514.545505] env[62619]: value = "task-1777563" [ 1514.545505] env[62619]: _type = "Task" [ 1514.545505] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.555055] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777563, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.558077] env[62619]: DEBUG nova.network.neutron [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Successfully updated port: 88e763dc-8978-45a5-a870-d55441fb392a {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1514.609667] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1514.609936] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1514.610105] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleting the datastore file [datastore1] 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1514.610601] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a01a3c3-e809-48f7-8e14-4a78447efea7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.617981] env[62619]: DEBUG oslo_vmware.api [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting 
for the task: (returnval){ [ 1514.617981] env[62619]: value = "task-1777564" [ 1514.617981] env[62619]: _type = "Task" [ 1514.617981] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.628143] env[62619]: DEBUG oslo_vmware.api [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777564, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.693160] env[62619]: DEBUG oslo_vmware.api [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Task: {'id': task-1777561, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141478} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.693433] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1514.693617] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1514.693803] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1514.693951] env[62619]: INFO nova.compute.manager [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1514.694249] env[62619]: DEBUG oslo.service.loopingcall [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1514.694450] env[62619]: DEBUG nova.compute.manager [-] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1514.694543] env[62619]: DEBUG nova.network.neutron [-] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1514.756973] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.592s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.757723] env[62619]: DEBUG nova.compute.manager [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1514.761517] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.602s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.761831] env[62619]: DEBUG nova.objects.instance [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Lazy-loading 'resources' on Instance uuid 597c0f95-5798-4022-8e2e-89a700698d7a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1515.059226] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777563, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.061979] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "refresh_cache-78c7a111-d497-4114-b4f4-07319e6e7df2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1515.062405] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired lock "refresh_cache-78c7a111-d497-4114-b4f4-07319e6e7df2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.062405] env[62619]: DEBUG nova.network.neutron [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1515.128455] env[62619]: DEBUG oslo_vmware.api [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777564, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180543} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.128706] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1515.128914] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1515.129146] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1515.129384] env[62619]: INFO nova.compute.manager [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1515.129675] env[62619]: DEBUG oslo.service.loopingcall [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1515.129945] env[62619]: DEBUG nova.compute.manager [-] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1515.130148] env[62619]: DEBUG nova.network.neutron [-] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1515.144724] env[62619]: DEBUG nova.virt.hardware [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1515.145097] env[62619]: DEBUG nova.virt.hardware [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1515.145323] env[62619]: DEBUG nova.virt.hardware [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1515.145564] env[62619]: DEBUG nova.virt.hardware [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1515.145783] env[62619]: DEBUG nova.virt.hardware [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1515.145997] env[62619]: DEBUG nova.virt.hardware [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1515.146422] env[62619]: DEBUG nova.virt.hardware [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1515.147633] env[62619]: DEBUG nova.virt.hardware [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1515.147633] env[62619]: DEBUG nova.virt.hardware [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1515.147633] env[62619]: DEBUG nova.virt.hardware [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1515.147633] env[62619]: DEBUG nova.virt.hardware [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1515.148145] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e82d0f5-5e84-4b0f-9a25-8f34a0d9d15f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.157624] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e05ef3e-3110-4610-b0a0-68a099c59888 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.172807] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1515.179324] env[62619]: DEBUG oslo.service.loopingcall [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1515.179836] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a802534f-1766-4ea9-9188-803ef197d775] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1515.180179] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7141d56f-1219-4332-b7be-f1c8f9451cea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.201085] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1515.201085] env[62619]: value = "task-1777566" [ 1515.201085] env[62619]: _type = "Task" [ 1515.201085] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.209889] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777566, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.268217] env[62619]: DEBUG nova.compute.utils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1515.269700] env[62619]: DEBUG nova.compute.manager [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1515.269884] env[62619]: DEBUG nova.network.neutron [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1515.321252] env[62619]: DEBUG nova.policy [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b0ec7a74ec944dd8b0417df0178ab15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e471e7b32b154c1db2eac990fd11e539', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1515.420108] env[62619]: DEBUG nova.compute.manager [req-ab4dd8eb-64ef-41a6-8f20-ea9d2eba3e1e req-ac62b97c-9d40-4469-a75a-f5bc57a02510 service nova] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Received event network-vif-deleted-2cb095b0-1732-485d-a7ea-c306699fa81f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1515.420388] env[62619]: INFO nova.compute.manager [req-ab4dd8eb-64ef-41a6-8f20-ea9d2eba3e1e req-ac62b97c-9d40-4469-a75a-f5bc57a02510 service nova] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Neutron deleted interface 2cb095b0-1732-485d-a7ea-c306699fa81f; detaching it from the instance and deleting it from the info cache [ 1515.420388] env[62619]: DEBUG nova.network.neutron [req-ab4dd8eb-64ef-41a6-8f20-ea9d2eba3e1e req-ac62b97c-9d40-4469-a75a-f5bc57a02510 service nova] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.558790] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777563, 'name': PowerOnVM_Task, 'duration_secs': 0.522426} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.559064] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1515.559267] env[62619]: INFO nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Took 8.78 seconds to spawn the instance on the hypervisor. [ 1515.559437] env[62619]: DEBUG nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1515.560244] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb53d8ea-f0ce-46a6-8599-244c8ade0080 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.607407] env[62619]: DEBUG nova.network.neutron [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1515.653069] env[62619]: DEBUG nova.network.neutron [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Successfully created port: 8fe434e6-0075-4cc4-b68c-f76dc00d2001 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1515.717182] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777566, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.737521] env[62619]: DEBUG nova.network.neutron [-] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.774336] env[62619]: DEBUG nova.compute.manager [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1515.811142] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20b36ac-d4b4-473a-a353-d572a594b4b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.820247] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b69c32-5669-4528-ac25-ae552106e850 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.825107] env[62619]: DEBUG nova.network.neutron [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Updating instance_info_cache with network_info: [{"id": "88e763dc-8978-45a5-a870-d55441fb392a", "address": "fa:16:3e:b8:3f:b1", "network": {"id": "0e501edc-55e3-4ded-9e7c-07e29223ba50", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1446821690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc6315c41fe451a94e28d68cab87b2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88e763dc-89", "ovs_interfaceid": "88e763dc-8978-45a5-a870-d55441fb392a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.865173] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d190ab46-a431-4020-96ec-774070a62f80 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.874624] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3da69dc-2c83-405c-b5bc-7385e370ba70 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.892531] env[62619]: DEBUG nova.compute.provider_tree [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1515.903990] env[62619]: DEBUG nova.network.neutron [-] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.923028] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4bad8d86-f035-4d5a-8fc6-3775479658b6 {{(pid=62619) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.933735] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1975bea7-476e-4d50-8403-29938c5d38e2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.974835] env[62619]: DEBUG nova.compute.manager [req-ab4dd8eb-64ef-41a6-8f20-ea9d2eba3e1e req-ac62b97c-9d40-4469-a75a-f5bc57a02510 service nova] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Detach interface failed, port_id=2cb095b0-1732-485d-a7ea-c306699fa81f, reason: Instance 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1516.082705] env[62619]: INFO nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Took 36.08 seconds to build instance. [ 1516.213338] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777566, 'name': CreateVM_Task, 'duration_secs': 0.82966} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.213338] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a802534f-1766-4ea9-9188-803ef197d775] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1516.213676] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.213676] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.213951] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1516.214266] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76f28c78-b54e-4407-8acf-12c31b1d2738 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.219608] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1516.219608] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5253c97b-d110-1c20-d261-07879110d537" [ 1516.219608] env[62619]: _type = "Task" [ 1516.219608] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.228747] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5253c97b-d110-1c20-d261-07879110d537, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.240395] env[62619]: INFO nova.compute.manager [-] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Took 1.55 seconds to deallocate network for instance. [ 1516.327715] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Releasing lock "refresh_cache-78c7a111-d497-4114-b4f4-07319e6e7df2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1516.328703] env[62619]: DEBUG nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Instance network_info: |[{"id": "88e763dc-8978-45a5-a870-d55441fb392a", "address": "fa:16:3e:b8:3f:b1", "network": {"id": "0e501edc-55e3-4ded-9e7c-07e29223ba50", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1446821690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc6315c41fe451a94e28d68cab87b2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88e763dc-89", "ovs_interfaceid": "88e763dc-8978-45a5-a870-d55441fb392a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1516.329151] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:3f:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aaf1b231-3660-4453-b4f3-44d825b9a5dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '88e763dc-8978-45a5-a870-d55441fb392a', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1516.337807] env[62619]: DEBUG oslo.service.loopingcall [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1516.337904] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1516.338121] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4375aa5-d025-4ae2-973b-03ad911585fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.359783] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1516.359783] env[62619]: value = "task-1777567" [ 1516.359783] env[62619]: _type = "Task" [ 1516.359783] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.370764] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777567, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.400551] env[62619]: DEBUG nova.scheduler.client.report [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1516.411860] env[62619]: INFO nova.compute.manager [-] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Took 1.28 seconds to deallocate network for instance. [ 1516.473471] env[62619]: DEBUG nova.compute.manager [req-5da5c693-b314-41c9-977c-2d5c46e73042 req-9e09a805-2d71-4ac8-8b4f-789f9d7fa721 service nova] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Received event network-changed-88e763dc-8978-45a5-a870-d55441fb392a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1516.473681] env[62619]: DEBUG nova.compute.manager [req-5da5c693-b314-41c9-977c-2d5c46e73042 req-9e09a805-2d71-4ac8-8b4f-789f9d7fa721 service nova] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Refreshing instance network info cache due to event network-changed-88e763dc-8978-45a5-a870-d55441fb392a. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1516.473901] env[62619]: DEBUG oslo_concurrency.lockutils [req-5da5c693-b314-41c9-977c-2d5c46e73042 req-9e09a805-2d71-4ac8-8b4f-789f9d7fa721 service nova] Acquiring lock "refresh_cache-78c7a111-d497-4114-b4f4-07319e6e7df2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.474053] env[62619]: DEBUG oslo_concurrency.lockutils [req-5da5c693-b314-41c9-977c-2d5c46e73042 req-9e09a805-2d71-4ac8-8b4f-789f9d7fa721 service nova] Acquired lock "refresh_cache-78c7a111-d497-4114-b4f4-07319e6e7df2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.474218] env[62619]: DEBUG nova.network.neutron [req-5da5c693-b314-41c9-977c-2d5c46e73042 req-9e09a805-2d71-4ac8-8b4f-789f9d7fa721 service nova] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Refreshing network info cache for port 88e763dc-8978-45a5-a870-d55441fb392a {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1516.584623] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "ca5f5f6b-5303-4af4-adaa-e4aac72a90f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 79.465s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.731127] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5253c97b-d110-1c20-d261-07879110d537, 'name': SearchDatastore_Task, 'duration_secs': 0.024183} completed successfully.
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.731444] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1516.731671] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1516.731900] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.732054] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.732237] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1516.732501] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15f59a4e-30d8-43a4-a2ef-303fe080798f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.743274] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1516.743516] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1516.744268] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-084b6fc9-8e60-4cc9-af9b-24e7de850b87 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.750436] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1516.751868] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1516.751868] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f1734a-3fbf-1d0d-f2aa-82431073f0e9" [ 1516.751868] env[62619]: _type = "Task" [ 1516.751868] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.760734] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f1734a-3fbf-1d0d-f2aa-82431073f0e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.784290] env[62619]: DEBUG nova.compute.manager [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1516.812703] env[62619]: DEBUG nova.virt.hardware [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1516.813120] env[62619]: DEBUG nova.virt.hardware [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1516.813338] env[62619]: DEBUG nova.virt.hardware [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1516.813637] env[62619]: DEBUG nova.virt.hardware [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1516.813868] env[62619]: DEBUG nova.virt.hardware [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1516.814129] env[62619]: DEBUG nova.virt.hardware [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1516.814451] env[62619]: DEBUG nova.virt.hardware [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1516.814705] env[62619]: DEBUG nova.virt.hardware [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1516.814995] env[62619]: DEBUG nova.virt.hardware [None 
req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1516.815288] env[62619]: DEBUG nova.virt.hardware [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1516.815578] env[62619]: DEBUG nova.virt.hardware [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1516.816849] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb019635-6886-49e8-9538-199902157923 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.829713] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6a27c8-e38b-40ef-900e-9c493025c807 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.872348] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777567, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.906390] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.144s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.907751] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.758s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.909438] env[62619]: INFO nova.compute.claims [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1516.918578] env[62619]: DEBUG oslo_concurrency.lockutils [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1516.926844] env[62619]: INFO nova.scheduler.client.report [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Deleted allocations for instance 
597c0f95-5798-4022-8e2e-89a700698d7a [ 1517.088987] env[62619]: DEBUG nova.compute.manager [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1517.233308] env[62619]: DEBUG nova.network.neutron [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Successfully updated port: 8fe434e6-0075-4cc4-b68c-f76dc00d2001 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1517.270684] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f1734a-3fbf-1d0d-f2aa-82431073f0e9, 'name': SearchDatastore_Task, 'duration_secs': 0.023353} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.274052] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d82d449-6e91-4793-bea3-9236ad3f5494 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.280763] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1517.280763] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526755f2-134c-ad0a-e951-142cc08347d1" [ 1517.280763] env[62619]: _type = "Task" [ 1517.280763] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.289463] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526755f2-134c-ad0a-e951-142cc08347d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.337023] env[62619]: DEBUG nova.network.neutron [req-5da5c693-b314-41c9-977c-2d5c46e73042 req-9e09a805-2d71-4ac8-8b4f-789f9d7fa721 service nova] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Updated VIF entry in instance network info cache for port 88e763dc-8978-45a5-a870-d55441fb392a. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1517.337419] env[62619]: DEBUG nova.network.neutron [req-5da5c693-b314-41c9-977c-2d5c46e73042 req-9e09a805-2d71-4ac8-8b4f-789f9d7fa721 service nova] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Updating instance_info_cache with network_info: [{"id": "88e763dc-8978-45a5-a870-d55441fb392a", "address": "fa:16:3e:b8:3f:b1", "network": {"id": "0e501edc-55e3-4ded-9e7c-07e29223ba50", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1446821690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc6315c41fe451a94e28d68cab87b2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88e763dc-89", "ovs_interfaceid": "88e763dc-8978-45a5-a870-d55441fb392a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1517.371237] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777567, 'name': CreateVM_Task, 'duration_secs': 0.529309} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.371417] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1517.372101] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.372274] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.372584] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1517.372896] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-850550ee-a986-4de3-80d1-425d76e6c215 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.377935] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1517.377935] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524c8aa5-5b07-becb-fedf-36f2cbdb008e" [ 1517.377935] env[62619]: _type = "Task" [ 1517.377935] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.386585] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524c8aa5-5b07-becb-fedf-36f2cbdb008e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.435627] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccbb4b9e-ddae-4879-ab2a-e1c7b6d0b9d9 tempest-ServersAaction247Test-1153729813 tempest-ServersAaction247Test-1153729813-project-member] Lock "597c0f95-5798-4022-8e2e-89a700698d7a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.589s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.722244] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.736141] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "refresh_cache-cd8b8828-79cf-4a7c-b018-b8bd745aaa45" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.739469] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "refresh_cache-cd8b8828-79cf-4a7c-b018-b8bd745aaa45" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.739469] env[62619]: DEBUG nova.network.neutron [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1517.792031] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526755f2-134c-ad0a-e951-142cc08347d1, 'name': SearchDatastore_Task, 'duration_secs': 0.010361} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.792304] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.792561] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a802534f-1766-4ea9-9188-803ef197d775/a802534f-1766-4ea9-9188-803ef197d775.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1517.792932] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3d33f48-89ff-4ad6-9ad9-71f277346716 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.801481] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1517.801481] env[62619]: value = "task-1777568" [ 1517.801481] env[62619]: _type = "Task" [ 1517.801481] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.811480] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777568, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.839811] env[62619]: DEBUG oslo_concurrency.lockutils [req-5da5c693-b314-41c9-977c-2d5c46e73042 req-9e09a805-2d71-4ac8-8b4f-789f9d7fa721 service nova] Releasing lock "refresh_cache-78c7a111-d497-4114-b4f4-07319e6e7df2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.841359] env[62619]: DEBUG nova.compute.manager [req-5da5c693-b314-41c9-977c-2d5c46e73042 req-9e09a805-2d71-4ac8-8b4f-789f9d7fa721 service nova] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Received event network-vif-deleted-a1723b81-03da-4080-8d54-cacb839c3f1d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1517.890668] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524c8aa5-5b07-becb-fedf-36f2cbdb008e, 'name': SearchDatastore_Task, 'duration_secs': 0.012817} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.890985] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.891246] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1517.891487] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.891631] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.891806] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1517.892160] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4943c54-170a-441b-8be1-b3e5b5cf521a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.903369] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1517.903581] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1517.904277] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-528aa3ce-059d-409e-9e58-d7be362b5e2f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.910750] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1517.910750] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526e55b0-3c22-ffac-ba87-655d14eff810" [ 1517.910750] env[62619]: _type = "Task" [ 1517.910750] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.922511] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526e55b0-3c22-ffac-ba87-655d14eff810, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.314602] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777568, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.326663] env[62619]: DEBUG nova.network.neutron [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1518.411423] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4d2e5f-9ae4-47a5-9cee-185dbf4af8ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.425311] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526e55b0-3c22-ffac-ba87-655d14eff810, 'name': SearchDatastore_Task, 'duration_secs': 0.015038} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.428260] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4998c8b2-32f6-4af4-9af1-dc9378bdf60f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.431525] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e0587b-38b2-4b41-8e40-c6b97a0552f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.444781] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1518.444781] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52412c7d-e603-808f-43a9-f211b7a5c088" [ 1518.444781] env[62619]: _type = "Task" [ 1518.444781] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.472083] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e88776-7635-4610-a0a4-a2667c1e5ee3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.481740] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52412c7d-e603-808f-43a9-f211b7a5c088, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.485150] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7256833-461d-40fd-bdf7-ee534816ed8e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.500650] env[62619]: DEBUG nova.compute.provider_tree [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1518.653141] env[62619]: DEBUG nova.compute.manager [req-ed512bc4-9e36-450d-9650-49fc0e426b94 req-64d7103f-5338-42da-9fc8-bb9b01cef738 service nova] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Received event network-vif-plugged-8fe434e6-0075-4cc4-b68c-f76dc00d2001 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1518.653389] env[62619]: DEBUG oslo_concurrency.lockutils [req-ed512bc4-9e36-450d-9650-49fc0e426b94 req-64d7103f-5338-42da-9fc8-bb9b01cef738 service nova] Acquiring lock "cd8b8828-79cf-4a7c-b018-b8bd745aaa45-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.653590] env[62619]: DEBUG oslo_concurrency.lockutils [req-ed512bc4-9e36-450d-9650-49fc0e426b94 req-64d7103f-5338-42da-9fc8-bb9b01cef738 service nova] Lock "cd8b8828-79cf-4a7c-b018-b8bd745aaa45-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.653748] env[62619]: DEBUG oslo_concurrency.lockutils [req-ed512bc4-9e36-450d-9650-49fc0e426b94 req-64d7103f-5338-42da-9fc8-bb9b01cef738 service nova] Lock "cd8b8828-79cf-4a7c-b018-b8bd745aaa45-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.656015] env[62619]: DEBUG nova.compute.manager [req-ed512bc4-9e36-450d-9650-49fc0e426b94 req-64d7103f-5338-42da-9fc8-bb9b01cef738 service nova] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] No waiting events found dispatching network-vif-plugged-8fe434e6-0075-4cc4-b68c-f76dc00d2001 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1518.656015] env[62619]: WARNING nova.compute.manager [req-ed512bc4-9e36-450d-9650-49fc0e426b94 req-64d7103f-5338-42da-9fc8-bb9b01cef738 service nova] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Received unexpected event network-vif-plugged-8fe434e6-0075-4cc4-b68c-f76dc00d2001 for instance with vm_state building and task_state spawning. 
[ 1518.656015] env[62619]: DEBUG nova.compute.manager [req-ed512bc4-9e36-450d-9650-49fc0e426b94 req-64d7103f-5338-42da-9fc8-bb9b01cef738 service nova] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Received event network-changed-8fe434e6-0075-4cc4-b68c-f76dc00d2001 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1518.656015] env[62619]: DEBUG nova.compute.manager [req-ed512bc4-9e36-450d-9650-49fc0e426b94 req-64d7103f-5338-42da-9fc8-bb9b01cef738 service nova] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Refreshing instance network info cache due to event network-changed-8fe434e6-0075-4cc4-b68c-f76dc00d2001. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1518.656015] env[62619]: DEBUG oslo_concurrency.lockutils [req-ed512bc4-9e36-450d-9650-49fc0e426b94 req-64d7103f-5338-42da-9fc8-bb9b01cef738 service nova] Acquiring lock "refresh_cache-cd8b8828-79cf-4a7c-b018-b8bd745aaa45" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.694841] env[62619]: DEBUG nova.network.neutron [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Updating instance_info_cache with network_info: [{"id": "8fe434e6-0075-4cc4-b68c-f76dc00d2001", "address": "fa:16:3e:14:cb:46", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fe434e6-00", "ovs_interfaceid": "8fe434e6-0075-4cc4-b68c-f76dc00d2001", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.727667] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Acquiring lock "b1c3c213-599d-4cab-8224-d87467d774c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.727667] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Lock "b1c3c213-599d-4cab-8224-d87467d774c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.814014] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777568, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.728883} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.814372] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a802534f-1766-4ea9-9188-803ef197d775/a802534f-1766-4ea9-9188-803ef197d775.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1518.814614] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1518.814877] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-50941b65-f587-41cc-8aa3-9b376cd56611 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.827333] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1518.827333] env[62619]: value = "task-1777569" [ 1518.827333] env[62619]: _type = "Task" [ 1518.827333] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.845090] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777569, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.979405] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52412c7d-e603-808f-43a9-f211b7a5c088, 'name': SearchDatastore_Task, 'duration_secs': 0.044194} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.979724] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.980038] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 78c7a111-d497-4114-b4f4-07319e6e7df2/78c7a111-d497-4114-b4f4-07319e6e7df2.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1518.980453] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62b635d9-ec31-48e6-a069-346ad11afb05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.987754] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1518.987754] env[62619]: value = "task-1777570" [ 1518.987754] env[62619]: _type = "Task" [ 1518.987754] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.996720] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777570, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.020826] env[62619]: ERROR nova.scheduler.client.report [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [req-29d0ea70-0b19-4537-af45-ef1b3d4a10ee] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-29d0ea70-0b19-4537-af45-ef1b3d4a10ee"}]} [ 1519.036068] env[62619]: DEBUG nova.scheduler.client.report [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1519.052763] env[62619]: DEBUG nova.scheduler.client.report [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1519.053080] env[62619]: DEBUG nova.compute.provider_tree [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1519.067509] env[62619]: DEBUG nova.scheduler.client.report [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1519.085629] env[62619]: DEBUG nova.scheduler.client.report [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1519.196610] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "refresh_cache-cd8b8828-79cf-4a7c-b018-b8bd745aaa45" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.196938] env[62619]: DEBUG nova.compute.manager [None 
req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Instance network_info: |[{"id": "8fe434e6-0075-4cc4-b68c-f76dc00d2001", "address": "fa:16:3e:14:cb:46", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fe434e6-00", "ovs_interfaceid": "8fe434e6-0075-4cc4-b68c-f76dc00d2001", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1519.197253] env[62619]: DEBUG oslo_concurrency.lockutils [req-ed512bc4-9e36-450d-9650-49fc0e426b94 req-64d7103f-5338-42da-9fc8-bb9b01cef738 service nova] Acquired lock "refresh_cache-cd8b8828-79cf-4a7c-b018-b8bd745aaa45" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.197419] env[62619]: DEBUG nova.network.neutron [req-ed512bc4-9e36-450d-9650-49fc0e426b94 req-64d7103f-5338-42da-9fc8-bb9b01cef738 service nova] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Refreshing network info cache for port 8fe434e6-0075-4cc4-b68c-f76dc00d2001 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1519.198598] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:cb:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8fe434e6-0075-4cc4-b68c-f76dc00d2001', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1519.206278] env[62619]: DEBUG oslo.service.loopingcall [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1519.211573] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1519.215020] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24582f95-5bf7-4c20-9c6f-df2d6ad6afad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.242305] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1519.242305] env[62619]: value = "task-1777571" [ 1519.242305] env[62619]: _type = "Task" [ 1519.242305] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.254283] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777571, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.338346] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777569, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069326} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.341042] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1519.342581] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfcc1275-329b-4ec4-89f9-65094ae20edc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.369559] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] a802534f-1766-4ea9-9188-803ef197d775/a802534f-1766-4ea9-9188-803ef197d775.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1519.374099] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fdd38d7-6349-49ab-af06-555ad57925e8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.400154] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1519.400154] env[62619]: value = "task-1777572" [ 1519.400154] env[62619]: _type = "Task" [ 1519.400154] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.410304] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777572, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.497552] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777570, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.632889] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7288944d-7c1d-419d-a916-391942a7a34e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.641259] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed3f160-c784-4ffa-8f68-afcc6c4ef128 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.675337] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2c5596-bc4c-4311-a1f4-bd96f825d4c0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.683937] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d3a1bf-7402-4e73-8118-01ed5685c21b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.699341] env[62619]: DEBUG nova.compute.provider_tree [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1519.757607] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777571, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.827619] env[62619]: DEBUG nova.network.neutron [req-ed512bc4-9e36-450d-9650-49fc0e426b94 req-64d7103f-5338-42da-9fc8-bb9b01cef738 service nova] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Updated VIF entry in instance network info cache for port 8fe434e6-0075-4cc4-b68c-f76dc00d2001. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1519.828017] env[62619]: DEBUG nova.network.neutron [req-ed512bc4-9e36-450d-9650-49fc0e426b94 req-64d7103f-5338-42da-9fc8-bb9b01cef738 service nova] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Updating instance_info_cache with network_info: [{"id": "8fe434e6-0075-4cc4-b68c-f76dc00d2001", "address": "fa:16:3e:14:cb:46", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fe434e6-00", "ovs_interfaceid": "8fe434e6-0075-4cc4-b68c-f76dc00d2001", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.914879] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777572, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.003566] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777570, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.242747] env[62619]: DEBUG nova.scheduler.client.report [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 70 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1520.244599] env[62619]: DEBUG nova.compute.provider_tree [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 70 to 71 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1520.244951] env[62619]: DEBUG nova.compute.provider_tree [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1520.260812] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777571, 'name': CreateVM_Task, 'duration_secs': 0.562454} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.264738] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1520.264738] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.264738] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.264738] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1520.264738] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90e219ea-9ccb-485f-a603-01961aa73dbc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.268530] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1520.268530] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529f0549-f436-9267-1a8b-9a48e711561c" [ 1520.268530] env[62619]: _type = "Task" [ 1520.268530] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.281049] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529f0549-f436-9267-1a8b-9a48e711561c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.332827] env[62619]: DEBUG oslo_concurrency.lockutils [req-ed512bc4-9e36-450d-9650-49fc0e426b94 req-64d7103f-5338-42da-9fc8-bb9b01cef738 service nova] Releasing lock "refresh_cache-cd8b8828-79cf-4a7c-b018-b8bd745aaa45" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.414727] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777572, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.500728] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777570, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.16358} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.501180] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 78c7a111-d497-4114-b4f4-07319e6e7df2/78c7a111-d497-4114-b4f4-07319e6e7df2.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1520.501526] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1520.501956] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a3bc67c-7bbf-4b1f-b841-4914ff2dbb9e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.514143] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1520.514143] env[62619]: value = "task-1777573" [ 1520.514143] env[62619]: _type = "Task" [ 1520.514143] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.522194] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777573, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.758025] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.848s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1520.758025] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.072s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1520.759856] env[62619]: INFO nova.compute.claims [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1520.784662] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529f0549-f436-9267-1a8b-9a48e711561c, 'name': SearchDatastore_Task, 'duration_secs': 0.020867} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.785047] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.785312] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1520.785618] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.785713] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.785834] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 
tempest-ServersAdminTestJSON-1690188522-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1520.786695] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10b20cae-a2cc-4ada-9dff-9cd92852b566 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.799361] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1520.800121] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1520.800390] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1857caa7-f63c-4ce9-9f74-6b978af9a9a0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.809498] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1520.809498] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dd89e1-ae2f-b8db-a227-5785563717cd" [ 1520.809498] env[62619]: _type = "Task" [ 1520.809498] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.819917] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dd89e1-ae2f-b8db-a227-5785563717cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.917063] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777572, 'name': ReconfigVM_Task, 'duration_secs': 1.053827} completed successfully. 
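The devstack-image-cache_base handling a few entries above (FileManager.MakeDirectory, "Folder ... created", then a HostDatastoreBrowser.SearchDatastore_Task) is the usual create-if-missing-then-look-for-the-cached-VMDK step. A rough sketch under assumed names; the session, Datacenter and datastore-browser references are illustrative, not taken from this log:

    from oslo_vmware import exceptions as vexc

    def ensure_cache_dir(session, datacenter, ds_path):
        file_mgr = session.vim.service_content.fileManager
        try:
            # FileManager.MakeDirectory is synchronous, which is why the log shows
            # a single "Invoking FileManager.MakeDirectory" line and no task polling.
            session.invoke_api(session.vim, 'MakeDirectory', file_mgr,
                               name=ds_path, datacenter=datacenter,
                               createParentDirectories=True)
        except vexc.FileAlreadyExistsException:
            pass  # another worker created it first; that is fine

    def cached_image_present(session, browser, folder_path, vmdk_name):
        spec = session.vim.client.factory.create('ns0:HostDatastoreBrowserSearchSpec')
        spec.matchPattern = [vmdk_name]
        task = session.invoke_api(session.vim, 'SearchDatastore_Task', browser,
                                  datastorePath=folder_path, searchSpec=spec)
        result = session.wait_for_task(task).result
        return bool(getattr(result, 'file', []))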
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.917063] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Reconfigured VM instance instance-0000002b to attach disk [datastore1] a802534f-1766-4ea9-9188-803ef197d775/a802534f-1766-4ea9-9188-803ef197d775.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1520.917063] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-374ec305-b40d-45eb-93dd-fd4bba297a84 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.924681] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1520.924681] env[62619]: value = "task-1777574" [ 1520.924681] env[62619]: _type = "Task" [ 1520.924681] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.940667] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777574, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.023697] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777573, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078971} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.023979] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1521.024834] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69eedbf9-257b-400e-8796-7551fd1892de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.049735] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 78c7a111-d497-4114-b4f4-07319e6e7df2/78c7a111-d497-4114-b4f4-07319e6e7df2.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1521.050354] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c85121f8-2e3d-47d3-8bbf-b55f20b3e4f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.072051] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1521.072051] env[62619]: value = "task-1777575" [ 1521.072051] env[62619]: _type = "Task" [ 1521.072051] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.087156] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777575, 'name': ReconfigVM_Task} progress is 10%. 
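"Reconfiguring VM instance ... to attach disk ... with type sparse" corresponds to a single ReconfigVM_Task call whose config spec adds a VirtualDisk device pointing at the copied VMDK. The sketch below shows the general shape of such a spec at the vSphere API level; the backing class, controller key and unit number are illustrative assumptions, not the exact objects Nova builds:

    def attach_vmdk(session, vm_ref, vmdk_path, controller_key=1000, unit_number=0):
        cf = session.vim.client.factory
        backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
        backing.fileName = vmdk_path        # e.g. '[datastore1] <uuid>/<uuid>.vmdk'
        backing.diskMode = 'persistent'

        disk = cf.create('ns0:VirtualDisk')
        disk.backing = backing
        disk.controllerKey = controller_key
        disk.unitNumber = unit_number
        disk.key = -100                     # negative placeholder key for a new device

        dev_change = cf.create('ns0:VirtualDeviceConfigSpec')
        dev_change.operation = 'add'
        dev_change.device = disk

        config_spec = cf.create('ns0:VirtualMachineConfigSpec')
        config_spec.deviceChange = [dev_change]

        task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                                  spec=config_spec)
        session.wait_for_task(task)         # the ReconfigVM_Task progress lines above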
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.259271] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Acquiring lock "3764a198-cd1b-4982-9d0c-953a74f845ef" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.259539] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Lock "3764a198-cd1b-4982-9d0c-953a74f845ef" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.323579] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dd89e1-ae2f-b8db-a227-5785563717cd, 'name': SearchDatastore_Task, 'duration_secs': 0.031373} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.324587] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1696855-c30d-4b0c-b99b-054c797fe9e8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.330744] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1521.330744] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52379adc-9987-abe2-3596-6fe7f927ae30" [ 1521.330744] env[62619]: _type = "Task" [ 1521.330744] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.342553] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52379adc-9987-abe2-3596-6fe7f927ae30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.434885] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777574, 'name': Rename_Task, 'duration_secs': 0.508733} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.435228] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1521.435632] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-163224d6-1b51-4ff0-b0a2-e3d8d614d3eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.447062] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1521.447062] env[62619]: value = "task-1777576" [ 1521.447062] env[62619]: _type = "Task" [ 1521.447062] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.451743] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777576, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.584520] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777575, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.763233] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Lock "3764a198-cd1b-4982-9d0c-953a74f845ef" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.503s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.763973] env[62619]: DEBUG nova.compute.manager [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1521.852968] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52379adc-9987-abe2-3596-6fe7f927ae30, 'name': SearchDatastore_Task, 'duration_secs': 0.035975} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.854159] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.854159] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] cd8b8828-79cf-4a7c-b018-b8bd745aaa45/cd8b8828-79cf-4a7c-b018-b8bd745aaa45.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1521.854159] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30e09e4a-299b-4e0f-810c-82192b383b28 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.866294] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1521.866294] env[62619]: value = "task-1777577" [ 1521.866294] env[62619]: _type = "Task" [ 1521.866294] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.881953] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777577, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.911897] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Acquiring lock "40eeb844-7423-4818-8095-81062c7e6392" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.912153] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Lock "40eeb844-7423-4818-8095-81062c7e6392" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.912770] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Acquiring lock "40eeb844-7423-4818-8095-81062c7e6392-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.912770] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Lock "40eeb844-7423-4818-8095-81062c7e6392-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.912952] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Lock "40eeb844-7423-4818-8095-81062c7e6392-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.919346] env[62619]: INFO nova.compute.manager [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Terminating instance [ 1521.954622] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777576, 'name': PowerOnVM_Task} progress is 97%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.090013] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777575, 'name': ReconfigVM_Task, 'duration_secs': 0.733708} completed successfully. 
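The blocks of 'Acquiring lock ... by ...', 'acquired ... :: waited Ns' and 'released ... :: held Ns' lines (here around do_terminate_instance and _clear_events) are emitted by oslo.concurrency's lockutils wrappers. A minimal sketch of the two usual forms, with lock names shaped like the per-instance ones in the log:

    from oslo_concurrency import lockutils

    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('40eeb844-7423-4818-8095-81062c7e6392')
    def do_terminate_instance():
        # Runs with the per-instance lock held: "waited" is how long acquisition
        # blocked, "held" is how long this body took.
        pass

    # The same mechanism as a context manager instead of a decorator:
    def clear_events_for_instance(instance_uuid):
        with lockutils.lock('%s-events' % instance_uuid):
            return []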
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.094014] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 78c7a111-d497-4114-b4f4-07319e6e7df2/78c7a111-d497-4114-b4f4-07319e6e7df2.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1522.094014] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d4d6821-cf61-47ac-bf78-9bf8a53ae006 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.105853] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1522.105853] env[62619]: value = "task-1777578" [ 1522.105853] env[62619]: _type = "Task" [ 1522.105853] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.121587] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777578, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.272510] env[62619]: DEBUG nova.compute.utils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1522.279623] env[62619]: DEBUG nova.compute.manager [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1522.279623] env[62619]: DEBUG nova.network.neutron [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1522.355377] env[62619]: DEBUG nova.policy [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb5e970925d04b35a420d4c7387d7a10', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e16f30fbe8244df827ce093504d4c2d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1522.362345] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7b9e02-c541-48ec-abfe-ac74ee530105 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.373948] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88a1da7-2eda-44ae-ac75-a01b5725a6b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.380559] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777577, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.415430] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5d0a74-092a-42c4-b218-0309d2a1b78e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.425914] env[62619]: DEBUG nova.compute.manager [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1522.425914] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1522.427330] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a4d160-442e-48f4-b439-875495417240 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.431439] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f163fc-924a-4d16-acec-bcb2a227421b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.447550] env[62619]: DEBUG nova.compute.provider_tree [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1522.454274] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1522.454828] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d417d253-72ec-47d0-bfdd-6919c4989515 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.465794] env[62619]: DEBUG oslo_vmware.api [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777576, 'name': PowerOnVM_Task, 'duration_secs': 0.570019} completed successfully. 
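The inventory dict in the ProviderTree update above is what the resource tracker reports to Placement for each resource class: total, reserved, allocation_ratio, and max_unit (the most a single allocation may take: 16 VCPUs, 65530 MB, 162 GB here). Usable capacity works out as (total - reserved) * allocation_ratio; a quick check of the logged figures:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, int(capacity))
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400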
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.467919] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1522.468160] env[62619]: DEBUG nova.compute.manager [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1522.468503] env[62619]: DEBUG oslo_vmware.api [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Waiting for the task: (returnval){ [ 1522.468503] env[62619]: value = "task-1777579" [ 1522.468503] env[62619]: _type = "Task" [ 1522.468503] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.469235] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9487a5bd-7f19-4ef4-8d9f-8922d092db15 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.484294] env[62619]: DEBUG oslo_vmware.api [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777579, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.615837] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777578, 'name': Rename_Task, 'duration_secs': 0.305729} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.616101] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1522.616535] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f128cdc-f1fb-4502-a9a9-417fda4aceae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.625038] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1522.625038] env[62619]: value = "task-1777580" [ 1522.625038] env[62619]: _type = "Task" [ 1522.625038] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.634364] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.634609] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.640778] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777580, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.781320] env[62619]: DEBUG nova.compute.manager [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1522.803055] env[62619]: DEBUG nova.network.neutron [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Successfully created port: 32c0a217-0471-4782-8add-8ba1f15d5c75 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1522.877891] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777577, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.554556} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.878171] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] cd8b8828-79cf-4a7c-b018-b8bd745aaa45/cd8b8828-79cf-4a7c-b018-b8bd745aaa45.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1522.878392] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1522.878647] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7596939-aee3-46e5-a870-1951f5786b22 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.885914] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1522.885914] env[62619]: value = "task-1777581" [ 1522.885914] env[62619]: _type = "Task" [ 1522.885914] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.896970] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777581, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.977202] env[62619]: ERROR nova.scheduler.client.report [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [req-000f1140-dde9-44bb-ab3c-45c7ee525aee] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-000f1140-dde9-44bb-ab3c-45c7ee525aee"}]} [ 1522.983619] env[62619]: DEBUG oslo_vmware.api [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777579, 'name': PowerOffVM_Task, 'duration_secs': 0.232176} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.983619] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1522.983619] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1522.983619] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31799268-486a-47d3-9d46-05c93ca32a43 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.005769] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.012869] env[62619]: DEBUG nova.scheduler.client.report [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1523.040708] env[62619]: DEBUG nova.scheduler.client.report [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1523.040946] env[62619]: DEBUG nova.compute.provider_tree [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1523.056058] env[62619]: DEBUG nova.scheduler.client.report [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Refreshing aggregate associations 
for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1523.072960] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1523.073227] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1523.073433] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Deleting the datastore file [datastore1] 40eeb844-7423-4818-8095-81062c7e6392 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1523.073721] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0f3d93d-fe6d-493c-9d51-f3948d0b3573 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.081576] env[62619]: DEBUG oslo_vmware.api [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Waiting for the task: (returnval){ [ 1523.081576] env[62619]: value = "task-1777583" [ 1523.081576] env[62619]: _type = "Task" [ 1523.081576] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.091419] env[62619]: DEBUG oslo_vmware.api [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777583, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.092345] env[62619]: DEBUG nova.scheduler.client.report [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1523.139476] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777580, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.397092] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777581, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075814} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.397092] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1523.397092] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d699c2d-b7c9-4980-8198-3445837ac40d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.425430] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] cd8b8828-79cf-4a7c-b018-b8bd745aaa45/cd8b8828-79cf-4a7c-b018-b8bd745aaa45.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1523.428899] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-287b0a75-3a55-451b-ba32-c70a9aae557b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.451703] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1523.451703] env[62619]: value = "task-1777584" [ 1523.451703] env[62619]: _type = "Task" [ 1523.451703] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.465358] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777584, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.594456] env[62619]: DEBUG oslo_vmware.api [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777583, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.622900] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa053b99-babd-4dde-b429-4221ec59fdcf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.632996] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab826d6-ab65-4868-bf49-a32c93886f10 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.639169] env[62619]: DEBUG oslo_vmware.api [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777580, 'name': PowerOnVM_Task, 'duration_secs': 0.752576} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.639797] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1523.640012] env[62619]: INFO nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Took 9.45 seconds to spawn the instance on the hypervisor. [ 1523.640227] env[62619]: DEBUG nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1523.640965] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493c0bc7-9b40-45bd-a492-2e45608ec9c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.675666] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37883e72-2c97-4fb5-9ba8-05ee03574458 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.687355] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acac93c2-100b-4ccd-8e04-d22e68a179f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.704224] env[62619]: DEBUG nova.compute.provider_tree [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1523.796626] env[62619]: DEBUG nova.compute.manager [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1523.820797] env[62619]: DEBUG nova.virt.hardware [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1523.821133] env[62619]: DEBUG nova.virt.hardware [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1523.821341] env[62619]: DEBUG nova.virt.hardware [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1523.821539] env[62619]: DEBUG nova.virt.hardware [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1523.821683] env[62619]: DEBUG nova.virt.hardware [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1523.821827] env[62619]: DEBUG nova.virt.hardware [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1523.822041] env[62619]: DEBUG nova.virt.hardware [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1523.822203] env[62619]: DEBUG nova.virt.hardware [None 
req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1523.822370] env[62619]: DEBUG nova.virt.hardware [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1523.822529] env[62619]: DEBUG nova.virt.hardware [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1523.822696] env[62619]: DEBUG nova.virt.hardware [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1523.823607] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c326ef1-3483-4309-a3b1-809e027f50ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.832209] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1892494d-2028-417a-8cfb-b0eeb6431232 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.962545] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777584, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.092359] env[62619]: DEBUG oslo_vmware.api [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Task: {'id': task-1777583, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.64484} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.092609] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1524.092792] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1524.092971] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1524.093170] env[62619]: INFO nova.compute.manager [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1524.093434] env[62619]: DEBUG oslo.service.loopingcall [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1524.093627] env[62619]: DEBUG nova.compute.manager [-] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1524.093717] env[62619]: DEBUG nova.network.neutron [-] [instance: 40eeb844-7423-4818-8095-81062c7e6392] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1524.177578] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "a802534f-1766-4ea9-9188-803ef197d775" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.177838] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "a802534f-1766-4ea9-9188-803ef197d775" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.178054] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "a802534f-1766-4ea9-9188-803ef197d775-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.178256] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "a802534f-1766-4ea9-9188-803ef197d775-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.178452] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "a802534f-1766-4ea9-9188-803ef197d775-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.180594] env[62619]: INFO nova.compute.manager [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Terminating instance [ 1524.189435] env[62619]: INFO nova.compute.manager [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Took 40.15 seconds to build instance. 
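Instance 40eeb844-... is torn down over the preceding entries in a fixed order: power off, unregister the VM, delete its datastore directory, then deallocate the Neutron ports. Condensed into one illustrative helper; names and arguments are assumed, not Nova's exact signatures:

    def destroy_instance(session, vm_ref, datacenter, ds_dir_path, deallocate_network):
        # 1. "Powering off the VM" -> PowerOffVM_Task, polled like any other task.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # 2. UnregisterVM is synchronous, hence no task polling after
        #    "Invoking VirtualMachine.UnregisterVM" in the log.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # 3. "Deleting the datastore file [datastore1] <instance-uuid>" ->
        #    FileManager.DeleteDatastoreFile_Task removes the instance directory.
        file_mgr = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_mgr,
                                  name=ds_dir_path, datacenter=datacenter)
        session.wait_for_task(task)

        # 4. Ports and allocations are released outside vCenter
        #    ("Deallocating network for instance" / deallocate_for_instance()).
        deallocate_network()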
[ 1524.240591] env[62619]: DEBUG nova.scheduler.client.report [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 72 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1524.240591] env[62619]: DEBUG nova.compute.provider_tree [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 72 to 73 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1524.240591] env[62619]: DEBUG nova.compute.provider_tree [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1524.466595] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777584, 'name': ReconfigVM_Task, 'duration_secs': 0.815399} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.466988] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Reconfigured VM instance instance-0000002e to attach disk [datastore1] cd8b8828-79cf-4a7c-b018-b8bd745aaa45/cd8b8828-79cf-4a7c-b018-b8bd745aaa45.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1524.467627] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3f4003c-2f59-4e16-b39e-567a887bc741 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.477347] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1524.477347] env[62619]: value = "task-1777585" [ 1524.477347] env[62619]: _type = "Task" [ 1524.477347] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.497087] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777585, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.601124] env[62619]: DEBUG nova.compute.manager [req-aa88945e-e08f-43f2-9fe8-71c2d2f0a801 req-50d9fb1a-ef42-4490-9e25-122a8f562088 service nova] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Received event network-vif-plugged-32c0a217-0471-4782-8add-8ba1f15d5c75 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1524.601377] env[62619]: DEBUG oslo_concurrency.lockutils [req-aa88945e-e08f-43f2-9fe8-71c2d2f0a801 req-50d9fb1a-ef42-4490-9e25-122a8f562088 service nova] Acquiring lock "39adf15c-f77e-4737-aeeb-258887007b9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.601862] env[62619]: DEBUG oslo_concurrency.lockutils [req-aa88945e-e08f-43f2-9fe8-71c2d2f0a801 req-50d9fb1a-ef42-4490-9e25-122a8f562088 service nova] Lock "39adf15c-f77e-4737-aeeb-258887007b9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.602538] env[62619]: DEBUG oslo_concurrency.lockutils [req-aa88945e-e08f-43f2-9fe8-71c2d2f0a801 req-50d9fb1a-ef42-4490-9e25-122a8f562088 service nova] Lock "39adf15c-f77e-4737-aeeb-258887007b9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.602783] env[62619]: DEBUG nova.compute.manager [req-aa88945e-e08f-43f2-9fe8-71c2d2f0a801 req-50d9fb1a-ef42-4490-9e25-122a8f562088 service nova] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] No waiting events found dispatching network-vif-plugged-32c0a217-0471-4782-8add-8ba1f15d5c75 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1524.602965] env[62619]: WARNING nova.compute.manager [req-aa88945e-e08f-43f2-9fe8-71c2d2f0a801 req-50d9fb1a-ef42-4490-9e25-122a8f562088 service nova] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Received unexpected event network-vif-plugged-32c0a217-0471-4782-8add-8ba1f15d5c75 for instance with vm_state building and task_state spawning. 
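The Rename_Task records above show oslo.vmware's task-wait loop: the driver invokes a vCenter task through the API session, then blocks in wait_for_task, which polls the task and logs the "progress is N%" / "completed successfully" lines. A simplified, hypothetical sketch of that call pattern (placeholder host and credentials; not Nova's vm_util code):

    from oslo_vmware import api as vmware_api

    def rename_vm(session, vm_ref, new_name):
        """Start a vCenter Rename_Task and block until it finishes."""
        task = session.invoke_api(session.vim, 'Rename_Task', vm_ref,
                                  newName=new_name)
        # wait_for_task polls the task object, emitting progress updates like
        # those in the log, and raises if the task ends in an error state.
        return session.wait_for_task(task)

    # Hypothetical session setup; values are placeholders, not this deployment's.
    # session = vmware_api.VMwareAPISession(
    #     'vcenter.example.org', 'user', 'secret',
    #     api_retry_count=10, task_poll_interval=0.5)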
[ 1524.691188] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3442a72-ee48-4dc5-9410-5280f58781d8 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "78c7a111-d497-4114-b4f4-07319e6e7df2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.534s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.692179] env[62619]: DEBUG nova.network.neutron [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Successfully updated port: 32c0a217-0471-4782-8add-8ba1f15d5c75 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1524.698902] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "refresh_cache-a802534f-1766-4ea9-9188-803ef197d775" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1524.699143] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquired lock "refresh_cache-a802534f-1766-4ea9-9188-803ef197d775" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1524.699264] env[62619]: DEBUG nova.network.neutron [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1524.702418] env[62619]: DEBUG nova.compute.manager [req-ad469a24-da0f-4a7e-b166-a19f5165392f req-66563cd5-d563-41a5-81e1-0c3941b4ecc1 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Received event network-vif-deleted-5c068735-1d99-4f86-a405-99a38588ba2b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1524.702603] env[62619]: INFO nova.compute.manager [req-ad469a24-da0f-4a7e-b166-a19f5165392f req-66563cd5-d563-41a5-81e1-0c3941b4ecc1 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Neutron deleted interface 5c068735-1d99-4f86-a405-99a38588ba2b; detaching it from the instance and deleting it from the info cache [ 1524.702768] env[62619]: DEBUG nova.network.neutron [req-ad469a24-da0f-4a7e-b166-a19f5165392f req-66563cd5-d563-41a5-81e1-0c3941b4ecc1 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.746278] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.988s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.746755] env[62619]: DEBUG nova.compute.manager [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 
tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1524.749635] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.428s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.751092] env[62619]: INFO nova.compute.claims [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1524.988938] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777585, 'name': Rename_Task, 'duration_secs': 0.169375} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.989233] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1524.989478] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43a0938b-720e-4562-829b-204ac9cc7f14 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.998456] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1524.998456] env[62619]: value = "task-1777586" [ 1524.998456] env[62619]: _type = "Task" [ 1524.998456] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.005699] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777586, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.127440] env[62619]: DEBUG nova.network.neutron [-] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.202392] env[62619]: DEBUG nova.compute.manager [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1525.208543] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Acquiring lock "refresh_cache-39adf15c-f77e-4737-aeeb-258887007b9a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1525.208543] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Acquired lock "refresh_cache-39adf15c-f77e-4737-aeeb-258887007b9a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1525.208543] env[62619]: DEBUG nova.network.neutron [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1525.212581] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33e44dbc-6456-45db-b5b0-be6abe077b2c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.225480] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0820b7d9-2ff6-4fe4-b661-80a1676e1f95 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.236749] env[62619]: DEBUG nova.network.neutron [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1525.268338] env[62619]: DEBUG nova.compute.utils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1525.269608] env[62619]: DEBUG nova.compute.manager [req-ad469a24-da0f-4a7e-b166-a19f5165392f req-66563cd5-d563-41a5-81e1-0c3941b4ecc1 service nova] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Detach interface failed, port_id=5c068735-1d99-4f86-a405-99a38588ba2b, reason: Instance 40eeb844-7423-4818-8095-81062c7e6392 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1525.272978] env[62619]: DEBUG nova.compute.manager [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1525.273157] env[62619]: DEBUG nova.network.neutron [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1525.313298] env[62619]: DEBUG nova.policy [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0cde3ba9ee004055bb5e09bc932dc4f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0948c27a2b08413ba82d553452965c9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1525.448290] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "ca5f5f6b-5303-4af4-adaa-e4aac72a90f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.448584] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "ca5f5f6b-5303-4af4-adaa-e4aac72a90f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.448794] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "ca5f5f6b-5303-4af4-adaa-e4aac72a90f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.448975] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "ca5f5f6b-5303-4af4-adaa-e4aac72a90f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.449378] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "ca5f5f6b-5303-4af4-adaa-e4aac72a90f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.451807] env[62619]: INFO nova.compute.manager [None 
req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Terminating instance [ 1525.509069] env[62619]: DEBUG oslo_vmware.api [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777586, 'name': PowerOnVM_Task, 'duration_secs': 0.509015} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.509069] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1525.509069] env[62619]: INFO nova.compute.manager [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Took 8.72 seconds to spawn the instance on the hypervisor. [ 1525.509069] env[62619]: DEBUG nova.compute.manager [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1525.509874] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0624e979-91cb-4728-a972-4404015a1690 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.512940] env[62619]: DEBUG oslo_concurrency.lockutils [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "78c7a111-d497-4114-b4f4-07319e6e7df2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.513016] env[62619]: DEBUG oslo_concurrency.lockutils [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "78c7a111-d497-4114-b4f4-07319e6e7df2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.513245] env[62619]: DEBUG oslo_concurrency.lockutils [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "78c7a111-d497-4114-b4f4-07319e6e7df2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.513448] env[62619]: DEBUG oslo_concurrency.lockutils [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "78c7a111-d497-4114-b4f4-07319e6e7df2-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.513609] env[62619]: DEBUG oslo_concurrency.lockutils [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "78c7a111-d497-4114-b4f4-07319e6e7df2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.516102] env[62619]: INFO nova.compute.manager [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Terminating instance [ 1525.526094] env[62619]: DEBUG nova.network.neutron [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.579745] env[62619]: DEBUG nova.network.neutron [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Successfully created port: 19adac57-34a5-41ed-8245-9f4cef383981 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1525.632284] env[62619]: INFO nova.compute.manager [-] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Took 1.54 seconds to deallocate network for instance. [ 1525.735112] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.757088] env[62619]: DEBUG nova.network.neutron [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1525.771072] env[62619]: DEBUG nova.compute.manager [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1525.956134] env[62619]: DEBUG nova.compute.manager [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1525.956419] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1525.957690] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a63087-318f-45cc-8e51-a99b15f0507a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.970938] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1525.971933] env[62619]: DEBUG nova.network.neutron [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Updating instance_info_cache with network_info: [{"id": "32c0a217-0471-4782-8add-8ba1f15d5c75", "address": "fa:16:3e:ac:46:6e", "network": {"id": "fc034058-2548-4339-96ae-60b10419f7a4", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1830513641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e16f30fbe8244df827ce093504d4c2d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c0a217-04", "ovs_interfaceid": "32c0a217-0471-4782-8add-8ba1f15d5c75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.973042] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fec5244f-2b41-469a-8f3f-25bc0ea6ceb7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.980827] env[62619]: DEBUG oslo_vmware.api [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1525.980827] env[62619]: value = "task-1777587" [ 1525.980827] env[62619]: _type = "Task" [ 1525.980827] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.993455] env[62619]: DEBUG oslo_vmware.api [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777587, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.031919] env[62619]: DEBUG nova.compute.manager [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1526.032217] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1526.034738] env[62619]: INFO nova.compute.manager [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Took 41.81 seconds to build instance. [ 1526.037083] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Releasing lock "refresh_cache-a802534f-1766-4ea9-9188-803ef197d775" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1526.037460] env[62619]: DEBUG nova.compute.manager [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1526.037635] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1526.038719] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b89f171-b8b2-44ac-84a5-4a437a75584e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.044945] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3e6ca45-28df-484a-98ec-fd5cec81071a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.054573] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1526.056664] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c95bd7a6-f1a4-4497-af44-326204774907 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.057743] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1526.060161] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d80f0454-3823-4ced-b9cc-73080c820693 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.067567] env[62619]: DEBUG oslo_vmware.api [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1526.067567] env[62619]: value = "task-1777589" [ 1526.067567] env[62619]: _type = "Task" [ 1526.067567] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.068798] env[62619]: DEBUG oslo_vmware.api [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1526.068798] env[62619]: value = "task-1777588" [ 1526.068798] env[62619]: _type = "Task" [ 1526.068798] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.088343] env[62619]: DEBUG oslo_vmware.api [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777589, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.088574] env[62619]: DEBUG oslo_vmware.api [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777588, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.140382] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.289785] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e752528d-790f-412e-83b5-be757baafddb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.298285] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475f8098-6206-4b98-a7f2-6a25283141c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.333146] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cb5ae1-e1c8-4ba4-b5cb-16df32a3554c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.341773] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43eb5bb3-af2c-4312-b90a-a7d45cb104f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.356390] env[62619]: DEBUG nova.compute.provider_tree [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1526.478304] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Releasing lock "refresh_cache-39adf15c-f77e-4737-aeeb-258887007b9a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1526.478642] env[62619]: DEBUG nova.compute.manager [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Instance network_info: |[{"id": "32c0a217-0471-4782-8add-8ba1f15d5c75", "address": "fa:16:3e:ac:46:6e", "network": {"id": "fc034058-2548-4339-96ae-60b10419f7a4", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1830513641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "1e16f30fbe8244df827ce093504d4c2d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c0a217-04", "ovs_interfaceid": "32c0a217-0471-4782-8add-8ba1f15d5c75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1526.479075] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:46:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '193994c7-8e1b-4f25-a4a4-d0563845eb28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32c0a217-0471-4782-8add-8ba1f15d5c75', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1526.486539] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Creating folder: Project (1e16f30fbe8244df827ce093504d4c2d). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1526.486866] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe9cc14c-ede9-4dc6-a91d-ed16a7f7be26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.497899] env[62619]: DEBUG oslo_vmware.api [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777587, 'name': PowerOffVM_Task, 'duration_secs': 0.288702} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.498158] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1526.498328] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1526.498571] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50868a00-c140-415d-be8f-3cea64e8acfe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.501982] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Created folder: Project (1e16f30fbe8244df827ce093504d4c2d) in parent group-v368875. [ 1526.502175] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Creating folder: Instances. Parent ref: group-v369012. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1526.502387] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a74598f-e0a9-4f92-8eae-30724e3133ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.512519] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Created folder: Instances in parent group-v369012. [ 1526.512822] env[62619]: DEBUG oslo.service.loopingcall [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1526.512938] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1526.513163] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42e45a5c-e7cc-4470-a7ee-600bb3e72bce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.532803] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1526.532803] env[62619]: value = "task-1777593" [ 1526.532803] env[62619]: _type = "Task" [ 1526.532803] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.546608] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6696be48-83d8-48f1-b8ff-0e427b4fa367 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "cd8b8828-79cf-4a7c-b018-b8bd745aaa45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.004s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1526.546799] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777593, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.550525] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aa68a6f9-cce4-47e5-8a9e-a64821f249c3 tempest-ServersAdminTestJSON-982114003 tempest-ServersAdminTestJSON-982114003-project-admin] Acquiring lock "refresh_cache-cd8b8828-79cf-4a7c-b018-b8bd745aaa45" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.550698] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aa68a6f9-cce4-47e5-8a9e-a64821f249c3 tempest-ServersAdminTestJSON-982114003 tempest-ServersAdminTestJSON-982114003-project-admin] Acquired lock "refresh_cache-cd8b8828-79cf-4a7c-b018-b8bd745aaa45" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.551249] env[62619]: DEBUG nova.network.neutron [None req-aa68a6f9-cce4-47e5-8a9e-a64821f249c3 tempest-ServersAdminTestJSON-982114003 tempest-ServersAdminTestJSON-982114003-project-admin] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1526.581601] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1526.581817] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1526.582011] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Deleting the datastore file [datastore1] ca5f5f6b-5303-4af4-adaa-e4aac72a90f8 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1526.587844] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d79109e-572f-43d1-9157-68d78c560210 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.590201] env[62619]: DEBUG oslo_vmware.api [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777589, 'name': PowerOffVM_Task, 
'duration_secs': 0.192847} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.590283] env[62619]: DEBUG oslo_vmware.api [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777588, 'name': PowerOffVM_Task, 'duration_secs': 0.210291} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.590514] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1526.590676] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1526.590926] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1526.591124] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1526.591666] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bf674c0-91ee-4f7e-96dd-384ee35443b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.593074] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8682b102-e338-4eb4-a006-3e5067e5297c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.599949] env[62619]: DEBUG oslo_vmware.api [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1526.599949] env[62619]: value = "task-1777594" [ 1526.599949] env[62619]: _type = "Task" [ 1526.599949] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.610145] env[62619]: DEBUG oslo_vmware.api [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777594, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.631030] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1526.631356] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1526.631586] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Deleting the datastore file [datastore1] a802534f-1766-4ea9-9188-803ef197d775 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1526.632225] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f2a6fe0-8f01-40b8-abd1-2aefc89dbc2f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.641440] env[62619]: DEBUG oslo_vmware.api [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1526.641440] env[62619]: value = "task-1777597" [ 1526.641440] env[62619]: _type = "Task" [ 1526.641440] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.642746] env[62619]: DEBUG nova.compute.manager [req-c26a4851-bd13-47bc-99d6-dc061d042f09 req-02ab4871-e9f3-4615-b081-054b76110262 service nova] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Received event network-changed-32c0a217-0471-4782-8add-8ba1f15d5c75 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1526.642929] env[62619]: DEBUG nova.compute.manager [req-c26a4851-bd13-47bc-99d6-dc061d042f09 req-02ab4871-e9f3-4615-b081-054b76110262 service nova] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Refreshing instance network info cache due to event network-changed-32c0a217-0471-4782-8add-8ba1f15d5c75. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1526.643147] env[62619]: DEBUG oslo_concurrency.lockutils [req-c26a4851-bd13-47bc-99d6-dc061d042f09 req-02ab4871-e9f3-4615-b081-054b76110262 service nova] Acquiring lock "refresh_cache-39adf15c-f77e-4737-aeeb-258887007b9a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.643338] env[62619]: DEBUG oslo_concurrency.lockutils [req-c26a4851-bd13-47bc-99d6-dc061d042f09 req-02ab4871-e9f3-4615-b081-054b76110262 service nova] Acquired lock "refresh_cache-39adf15c-f77e-4737-aeeb-258887007b9a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.643503] env[62619]: DEBUG nova.network.neutron [req-c26a4851-bd13-47bc-99d6-dc061d042f09 req-02ab4871-e9f3-4615-b081-054b76110262 service nova] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Refreshing network info cache for port 32c0a217-0471-4782-8add-8ba1f15d5c75 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1526.655872] env[62619]: DEBUG oslo_vmware.api [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777597, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.706210] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1526.706546] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1526.706677] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Deleting the datastore file [datastore1] 78c7a111-d497-4114-b4f4-07319e6e7df2 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1526.706944] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ace31048-c7a9-495d-8fe4-fffd1de9aad6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.715741] env[62619]: DEBUG oslo_vmware.api [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1526.715741] env[62619]: value = "task-1777598" [ 1526.715741] env[62619]: _type = "Task" [ 1526.715741] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.723796] env[62619]: DEBUG oslo_vmware.api [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777598, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.794804] env[62619]: DEBUG nova.compute.manager [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1526.825450] env[62619]: DEBUG nova.virt.hardware [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1526.825742] env[62619]: DEBUG nova.virt.hardware [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1526.825901] env[62619]: DEBUG nova.virt.hardware [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1526.826163] env[62619]: DEBUG nova.virt.hardware [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1526.826322] env[62619]: DEBUG nova.virt.hardware [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1526.826475] env[62619]: DEBUG nova.virt.hardware [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1526.826676] env[62619]: DEBUG nova.virt.hardware [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 
tempest-ImagesTestJSON-37875623-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1526.826828] env[62619]: DEBUG nova.virt.hardware [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1526.826990] env[62619]: DEBUG nova.virt.hardware [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1526.827171] env[62619]: DEBUG nova.virt.hardware [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1526.827339] env[62619]: DEBUG nova.virt.hardware [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1526.828213] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32017b6-f342-4af1-965c-3363040d96e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.836463] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eec7738-279e-4ef8-baf0-84319ca0b103 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.859924] env[62619]: DEBUG nova.scheduler.client.report [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1527.044950] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777593, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.052690] env[62619]: DEBUG nova.compute.manager [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1527.111442] env[62619]: DEBUG nova.compute.manager [req-fe83a578-7ca9-4f11-a671-f92d2c2e1a18 req-2c18dec5-08a1-4773-9014-8134a5de76ac service nova] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Received event network-vif-plugged-19adac57-34a5-41ed-8245-9f4cef383981 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1527.111681] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe83a578-7ca9-4f11-a671-f92d2c2e1a18 req-2c18dec5-08a1-4773-9014-8134a5de76ac service nova] Acquiring lock "eca829be-d425-4668-9ebd-1247c5ff19d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1527.112048] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe83a578-7ca9-4f11-a671-f92d2c2e1a18 req-2c18dec5-08a1-4773-9014-8134a5de76ac service nova] Lock "eca829be-d425-4668-9ebd-1247c5ff19d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.112110] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe83a578-7ca9-4f11-a671-f92d2c2e1a18 req-2c18dec5-08a1-4773-9014-8134a5de76ac service nova] Lock "eca829be-d425-4668-9ebd-1247c5ff19d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.112251] env[62619]: DEBUG nova.compute.manager [req-fe83a578-7ca9-4f11-a671-f92d2c2e1a18 req-2c18dec5-08a1-4773-9014-8134a5de76ac service nova] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] No waiting events found dispatching network-vif-plugged-19adac57-34a5-41ed-8245-9f4cef383981 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1527.112426] env[62619]: WARNING nova.compute.manager [req-fe83a578-7ca9-4f11-a671-f92d2c2e1a18 req-2c18dec5-08a1-4773-9014-8134a5de76ac service nova] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Received unexpected event network-vif-plugged-19adac57-34a5-41ed-8245-9f4cef383981 for instance with vm_state building and task_state spawning. [ 1527.118020] env[62619]: DEBUG oslo_vmware.api [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777594, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.263718} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.118020] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1527.118020] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1527.118020] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1527.118020] env[62619]: INFO nova.compute.manager [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1527.118854] env[62619]: DEBUG oslo.service.loopingcall [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1527.118854] env[62619]: DEBUG nova.compute.manager [-] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1527.118854] env[62619]: DEBUG nova.network.neutron [-] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1527.157328] env[62619]: DEBUG nova.network.neutron [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Successfully updated port: 19adac57-34a5-41ed-8245-9f4cef383981 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1527.165708] env[62619]: DEBUG oslo_vmware.api [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777597, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185127} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.165975] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1527.166175] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1527.166388] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1527.166523] env[62619]: INFO nova.compute.manager [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1527.166750] env[62619]: DEBUG oslo.service.loopingcall [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1527.166938] env[62619]: DEBUG nova.compute.manager [-] [instance: a802534f-1766-4ea9-9188-803ef197d775] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1527.167034] env[62619]: DEBUG nova.network.neutron [-] [instance: a802534f-1766-4ea9-9188-803ef197d775] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1527.186819] env[62619]: DEBUG nova.network.neutron [-] [instance: a802534f-1766-4ea9-9188-803ef197d775] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1527.229081] env[62619]: DEBUG oslo_vmware.api [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777598, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195904} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.230048] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1527.230048] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1527.230048] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1527.230048] env[62619]: INFO nova.compute.manager [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1527.230274] env[62619]: DEBUG oslo.service.loopingcall [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1527.230343] env[62619]: DEBUG nova.compute.manager [-] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1527.230435] env[62619]: DEBUG nova.network.neutron [-] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1527.365985] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.616s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.366535] env[62619]: DEBUG nova.compute.manager [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1527.369935] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.682s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.372706] env[62619]: INFO nova.compute.claims [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1527.544319] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777593, 'name': CreateVM_Task, 'duration_secs': 0.695206} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.544724] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1527.545113] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1527.545283] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1527.545581] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1527.545827] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6754c2f2-e1f7-40d3-b928-55a04980705e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.551126] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Waiting for the task: (returnval){ [ 1527.551126] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5297fa2c-075f-a7fa-41fd-37d15847dd3f" [ 1527.551126] env[62619]: _type = "Task" [ 1527.551126] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.564053] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5297fa2c-075f-a7fa-41fd-37d15847dd3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.579669] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1527.660529] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "refresh_cache-eca829be-d425-4668-9ebd-1247c5ff19d0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1527.660763] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired lock "refresh_cache-eca829be-d425-4668-9ebd-1247c5ff19d0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1527.660927] env[62619]: DEBUG nova.network.neutron [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1527.690499] env[62619]: DEBUG nova.network.neutron [-] [instance: a802534f-1766-4ea9-9188-803ef197d775] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1527.722904] env[62619]: DEBUG nova.network.neutron [None req-aa68a6f9-cce4-47e5-8a9e-a64821f249c3 tempest-ServersAdminTestJSON-982114003 tempest-ServersAdminTestJSON-982114003-project-admin] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Updating instance_info_cache with network_info: [{"id": "8fe434e6-0075-4cc4-b68c-f76dc00d2001", "address": "fa:16:3e:14:cb:46", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fe434e6-00", "ovs_interfaceid": 
"8fe434e6-0075-4cc4-b68c-f76dc00d2001", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1527.817173] env[62619]: DEBUG nova.network.neutron [req-c26a4851-bd13-47bc-99d6-dc061d042f09 req-02ab4871-e9f3-4615-b081-054b76110262 service nova] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Updated VIF entry in instance network info cache for port 32c0a217-0471-4782-8add-8ba1f15d5c75. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1527.817577] env[62619]: DEBUG nova.network.neutron [req-c26a4851-bd13-47bc-99d6-dc061d042f09 req-02ab4871-e9f3-4615-b081-054b76110262 service nova] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Updating instance_info_cache with network_info: [{"id": "32c0a217-0471-4782-8add-8ba1f15d5c75", "address": "fa:16:3e:ac:46:6e", "network": {"id": "fc034058-2548-4339-96ae-60b10419f7a4", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1830513641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e16f30fbe8244df827ce093504d4c2d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c0a217-04", "ovs_interfaceid": "32c0a217-0471-4782-8add-8ba1f15d5c75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1527.871513] env[62619]: DEBUG nova.compute.utils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1527.872839] env[62619]: DEBUG nova.compute.manager [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1527.873017] env[62619]: DEBUG nova.network.neutron [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1527.942824] env[62619]: DEBUG nova.policy [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb56261151994b459d40b190725f3867', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'af811eaa982a4c329e8ab9b58f4c8695', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1528.062063] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5297fa2c-075f-a7fa-41fd-37d15847dd3f, 'name': SearchDatastore_Task, 'duration_secs': 0.024072} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.062367] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1528.062615] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1528.062852] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.062995] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.063184] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1528.063475] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-137df5a1-a367-46b1-8932-d6f594f096d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.073097] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1528.073350] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1528.074235] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8afe14c8-6f96-4d16-a7fd-a21ed05a38bb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.080811] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Waiting for the task: (returnval){ [ 1528.080811] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52de135d-62c1-fbcd-8216-ce6ae16ca609" [ 1528.080811] env[62619]: _type = "Task" [ 1528.080811] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.090215] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52de135d-62c1-fbcd-8216-ce6ae16ca609, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.152889] env[62619]: DEBUG nova.network.neutron [-] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.192967] env[62619]: INFO nova.compute.manager [-] [instance: a802534f-1766-4ea9-9188-803ef197d775] Took 1.03 seconds to deallocate network for instance. 
[ 1528.228319] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aa68a6f9-cce4-47e5-8a9e-a64821f249c3 tempest-ServersAdminTestJSON-982114003 tempest-ServersAdminTestJSON-982114003-project-admin] Releasing lock "refresh_cache-cd8b8828-79cf-4a7c-b018-b8bd745aaa45" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1528.228585] env[62619]: DEBUG nova.compute.manager [None req-aa68a6f9-cce4-47e5-8a9e-a64821f249c3 tempest-ServersAdminTestJSON-982114003 tempest-ServersAdminTestJSON-982114003-project-admin] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Inject network info {{(pid=62619) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7600}} [ 1528.228979] env[62619]: DEBUG nova.compute.manager [None req-aa68a6f9-cce4-47e5-8a9e-a64821f249c3 tempest-ServersAdminTestJSON-982114003 tempest-ServersAdminTestJSON-982114003-project-admin] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] network_info to inject: |[{"id": "8fe434e6-0075-4cc4-b68c-f76dc00d2001", "address": "fa:16:3e:14:cb:46", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fe434e6-00", "ovs_interfaceid": "8fe434e6-0075-4cc4-b68c-f76dc00d2001", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7601}} [ 1528.234295] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-aa68a6f9-cce4-47e5-8a9e-a64821f249c3 tempest-ServersAdminTestJSON-982114003 tempest-ServersAdminTestJSON-982114003-project-admin] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Reconfiguring VM instance to set the machine id {{(pid=62619) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1528.234862] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f8c1547-4994-4ae2-b16d-b643a1db4772 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.247099] env[62619]: DEBUG nova.network.neutron [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1528.255968] env[62619]: DEBUG oslo_vmware.api [None req-aa68a6f9-cce4-47e5-8a9e-a64821f249c3 tempest-ServersAdminTestJSON-982114003 tempest-ServersAdminTestJSON-982114003-project-admin] Waiting for the task: (returnval){ [ 1528.255968] env[62619]: value = "task-1777599" [ 1528.255968] env[62619]: _type = "Task" [ 1528.255968] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.266900] env[62619]: DEBUG oslo_vmware.api [None req-aa68a6f9-cce4-47e5-8a9e-a64821f249c3 tempest-ServersAdminTestJSON-982114003 tempest-ServersAdminTestJSON-982114003-project-admin] Task: {'id': task-1777599, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.293883] env[62619]: DEBUG nova.network.neutron [-] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.320586] env[62619]: DEBUG oslo_concurrency.lockutils [req-c26a4851-bd13-47bc-99d6-dc061d042f09 req-02ab4871-e9f3-4615-b081-054b76110262 service nova] Releasing lock "refresh_cache-39adf15c-f77e-4737-aeeb-258887007b9a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1528.376221] env[62619]: DEBUG nova.compute.manager [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1528.499216] env[62619]: DEBUG nova.network.neutron [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Successfully created port: b1ace9af-97b6-4d21-bbe4-972a2a1c1e13 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1528.571083] env[62619]: DEBUG nova.network.neutron [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Updating instance_info_cache with network_info: [{"id": "19adac57-34a5-41ed-8245-9f4cef383981", "address": "fa:16:3e:e1:a9:c6", "network": {"id": "7bc2fa7e-84fd-4916-9af1-aa767e1e38b8", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1746046422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0948c27a2b08413ba82d553452965c9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19adac57-34", "ovs_interfaceid": "19adac57-34a5-41ed-8245-9f4cef383981", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.591663] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52de135d-62c1-fbcd-8216-ce6ae16ca609, 'name': SearchDatastore_Task, 'duration_secs': 0.017321} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.593044] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94cabc75-a257-47e4-9b29-62b4f919d42e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.602451] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Waiting for the task: (returnval){ [ 1528.602451] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52cb9e1a-8ddf-e848-c160-37135cce81cf" [ 1528.602451] env[62619]: _type = "Task" [ 1528.602451] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.614250] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52cb9e1a-8ddf-e848-c160-37135cce81cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.659178] env[62619]: INFO nova.compute.manager [-] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Took 1.43 seconds to deallocate network for instance. [ 1528.689737] env[62619]: DEBUG nova.compute.manager [req-28c4ba4f-0c81-426f-894e-f9e2f4cdfc66 req-1b8d5161-6008-4ae5-9071-854c8b4a09c8 service nova] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Received event network-vif-deleted-b96dbc91-29c9-4dc7-ab5b-6706e585d2d5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1528.699474] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.771790] env[62619]: DEBUG oslo_vmware.api [None req-aa68a6f9-cce4-47e5-8a9e-a64821f249c3 tempest-ServersAdminTestJSON-982114003 tempest-ServersAdminTestJSON-982114003-project-admin] Task: {'id': task-1777599, 'name': ReconfigVM_Task, 'duration_secs': 0.181009} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.771892] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-aa68a6f9-cce4-47e5-8a9e-a64821f249c3 tempest-ServersAdminTestJSON-982114003 tempest-ServersAdminTestJSON-982114003-project-admin] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Reconfigured VM instance to set the machine id {{(pid=62619) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1528.796644] env[62619]: INFO nova.compute.manager [-] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Took 1.68 seconds to deallocate network for instance. 
[ 1528.928903] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27119611-e1eb-490a-b119-4503010877bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.938617] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57a47d8-f971-421b-9978-075248be5bd1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.976640] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b2b573-f773-4a90-bb4b-6353a80ac9cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.985479] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cbb002e-9088-4e29-bc52-bc2275300cda {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.000150] env[62619]: DEBUG nova.compute.provider_tree [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1529.077939] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lock "refresh_cache-eca829be-d425-4668-9ebd-1247c5ff19d0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.078310] env[62619]: DEBUG nova.compute.manager [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Instance network_info: |[{"id": "19adac57-34a5-41ed-8245-9f4cef383981", "address": "fa:16:3e:e1:a9:c6", "network": {"id": "7bc2fa7e-84fd-4916-9af1-aa767e1e38b8", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1746046422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0948c27a2b08413ba82d553452965c9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19adac57-34", "ovs_interfaceid": "19adac57-34a5-41ed-8245-9f4cef383981", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1529.078735] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:a9:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a27fd90b-16a5-43af-bede-ae36762ece00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19adac57-34a5-41ed-8245-9f4cef383981', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1529.087215] env[62619]: DEBUG oslo.service.loopingcall [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1529.087429] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1529.087653] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-352124ff-7c17-4785-968b-07414da3e690 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.108897] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1529.108897] env[62619]: value = "task-1777600" [ 1529.108897] env[62619]: _type = "Task" [ 1529.108897] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.115206] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52cb9e1a-8ddf-e848-c160-37135cce81cf, 'name': SearchDatastore_Task, 'duration_secs': 0.011334} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.115585] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.115717] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 39adf15c-f77e-4737-aeeb-258887007b9a/39adf15c-f77e-4737-aeeb-258887007b9a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1529.116304] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02dde674-5cb7-48e5-8717-9a50d7e6b910 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.122937] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777600, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.124265] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Waiting for the task: (returnval){ [ 1529.124265] env[62619]: value = "task-1777601" [ 1529.124265] env[62619]: _type = "Task" [ 1529.124265] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.132412] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777601, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.171153] env[62619]: DEBUG oslo_concurrency.lockutils [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.292565] env[62619]: DEBUG nova.compute.manager [req-059ed7ac-ff2f-4114-88cc-c70e71a4224c req-704fb54f-1ddf-4a9d-a5ca-e3d21a410f09 service nova] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Received event network-changed-19adac57-34a5-41ed-8245-9f4cef383981 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1529.292767] env[62619]: DEBUG nova.compute.manager [req-059ed7ac-ff2f-4114-88cc-c70e71a4224c req-704fb54f-1ddf-4a9d-a5ca-e3d21a410f09 service nova] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Refreshing instance network info cache due to event network-changed-19adac57-34a5-41ed-8245-9f4cef383981. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1529.292987] env[62619]: DEBUG oslo_concurrency.lockutils [req-059ed7ac-ff2f-4114-88cc-c70e71a4224c req-704fb54f-1ddf-4a9d-a5ca-e3d21a410f09 service nova] Acquiring lock "refresh_cache-eca829be-d425-4668-9ebd-1247c5ff19d0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.293590] env[62619]: DEBUG oslo_concurrency.lockutils [req-059ed7ac-ff2f-4114-88cc-c70e71a4224c req-704fb54f-1ddf-4a9d-a5ca-e3d21a410f09 service nova] Acquired lock "refresh_cache-eca829be-d425-4668-9ebd-1247c5ff19d0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.293775] env[62619]: DEBUG nova.network.neutron [req-059ed7ac-ff2f-4114-88cc-c70e71a4224c req-704fb54f-1ddf-4a9d-a5ca-e3d21a410f09 service nova] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Refreshing network info cache for port 19adac57-34a5-41ed-8245-9f4cef383981 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1529.303986] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.396531] env[62619]: DEBUG nova.compute.manager [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1529.419410] env[62619]: DEBUG nova.virt.hardware [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:50:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='026a8c7d-034e-431f-86ad-5b594effd325',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-602383372',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1529.419962] env[62619]: DEBUG nova.virt.hardware [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1529.420308] env[62619]: DEBUG nova.virt.hardware [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1529.420632] env[62619]: DEBUG nova.virt.hardware [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1529.420926] env[62619]: DEBUG nova.virt.hardware [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1529.421229] env[62619]: DEBUG nova.virt.hardware [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1529.421605] env[62619]: DEBUG nova.virt.hardware [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1529.421901] env[62619]: DEBUG nova.virt.hardware [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1529.422233] env[62619]: DEBUG 
nova.virt.hardware [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1529.422664] env[62619]: DEBUG nova.virt.hardware [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1529.423023] env[62619]: DEBUG nova.virt.hardware [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1529.424222] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6723d2e-1036-4558-a651-90671115e0e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.436199] env[62619]: INFO nova.compute.manager [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Rebuilding instance [ 1529.439708] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-651284a2-3551-4343-8767-eb9642365a13 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.493824] env[62619]: DEBUG nova.compute.manager [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1529.493824] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1096a4-f6b2-4306-bd5e-9c51934ab2f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.539152] env[62619]: DEBUG nova.scheduler.client.report [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 73 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1529.539406] env[62619]: DEBUG nova.compute.provider_tree [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 73 to 74 during operation: update_inventory {{(pid=62619) _update_generation 
/opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1529.539577] env[62619]: DEBUG nova.compute.provider_tree [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1529.619981] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777600, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.636913] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777601, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.049021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.676s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.049021] env[62619]: DEBUG nova.compute.manager [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1530.049436] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 28.999s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.129152] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777600, 'name': CreateVM_Task, 'duration_secs': 0.637819} completed successfully. 
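In the inventory payload logged just above, each resource class carries a total, a reserved amount and an allocation_ratio; the capacity Placement lets the scheduler consume from such a record is conventionally described as (total - reserved) * allocation_ratio. A purely illustrative calculation with the values from the log:

    # Values copied from the set_inventory_for_provider payload above.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        usable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: ~{usable:g} schedulable")
    # VCPU: ~192, MEMORY_MB: ~196078, DISK_GB: ~400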
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.136795] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1530.137932] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.138141] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.138466] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1530.139154] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8c4d3ad-8f84-4df0-9d11-fa1178436bb8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.144921] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777601, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.592956} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.146054] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 39adf15c-f77e-4737-aeeb-258887007b9a/39adf15c-f77e-4737-aeeb-258887007b9a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1530.146282] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1530.146868] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1530.146868] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527ef017-8d5a-c208-5e57-b2efdeaa1cad" [ 1530.146868] env[62619]: _type = "Task" [ 1530.146868] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.147075] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b309ab33-0919-4e45-bec2-713bde190f09 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.158890] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527ef017-8d5a-c208-5e57-b2efdeaa1cad, 'name': SearchDatastore_Task, 'duration_secs': 0.011291} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.160413] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.160558] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1530.161250] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.161439] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.161823] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1530.163301] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Waiting for the task: (returnval){ [ 1530.163301] env[62619]: value = "task-1777602" [ 1530.163301] env[62619]: _type = "Task" [ 1530.163301] env[62619]: } to complete. 
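The "Waiting for the task ... to complete" and "_poll_task ... progress is N%" entries are oslo.vmware's task helper polling an asynchronous vCenter task. A hedged sketch of that call pattern, assuming an already-authenticated oslo_vmware.api.VMwareAPISession; the wrapper function and its arguments are illustrative:

    def copy_and_wait(session, src_path, dst_path, datacenter):
        # The virtual disk manager is normally taken from the vim service content.
        disk_mgr = session.vim.service_content.virtualDiskManager
        # Start the asynchronous CopyVirtualDisk_Task seen in the log...
        task = session.invoke_api(
            session.vim, "CopyVirtualDisk_Task", disk_mgr,
            sourceName=src_path, sourceDatacenter=datacenter,
            destName=dst_path, destDatacenter=datacenter)
        # ...and block while oslo.vmware polls it, raising if vCenter
        # reports the task as failed.
        return session.wait_for_task(task)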
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.163588] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7309c19-6509-4548-acae-4619cc74bdde {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.178260] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777602, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.179405] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1530.179580] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1530.180284] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cea2aae-6e71-4585-bc42-c76cee41f1d8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.185699] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1530.185699] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5248409b-7fe9-40d4-73bc-f26ad784665c" [ 1530.185699] env[62619]: _type = "Task" [ 1530.185699] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.194360] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5248409b-7fe9-40d4-73bc-f26ad784665c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.236381] env[62619]: DEBUG nova.network.neutron [req-059ed7ac-ff2f-4114-88cc-c70e71a4224c req-704fb54f-1ddf-4a9d-a5ca-e3d21a410f09 service nova] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Updated VIF entry in instance network info cache for port 19adac57-34a5-41ed-8245-9f4cef383981. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1530.236766] env[62619]: DEBUG nova.network.neutron [req-059ed7ac-ff2f-4114-88cc-c70e71a4224c req-704fb54f-1ddf-4a9d-a5ca-e3d21a410f09 service nova] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Updating instance_info_cache with network_info: [{"id": "19adac57-34a5-41ed-8245-9f4cef383981", "address": "fa:16:3e:e1:a9:c6", "network": {"id": "7bc2fa7e-84fd-4916-9af1-aa767e1e38b8", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1746046422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0948c27a2b08413ba82d553452965c9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19adac57-34", "ovs_interfaceid": "19adac57-34a5-41ed-8245-9f4cef383981", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1530.478058] env[62619]: DEBUG nova.network.neutron [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Successfully updated port: b1ace9af-97b6-4d21-bbe4-972a2a1c1e13 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1530.483438] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "cef20063-96f0-46cc-9f7d-4436b60216c6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.483674] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "cef20063-96f0-46cc-9f7d-4436b60216c6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.512299] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1530.512601] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5b56e588-2467-4822-8b7b-5f0621f8c0d8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.519836] env[62619]: DEBUG 
oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1530.519836] env[62619]: value = "task-1777603" [ 1530.519836] env[62619]: _type = "Task" [ 1530.519836] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.528890] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777603, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.552915] env[62619]: DEBUG nova.compute.utils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1530.565688] env[62619]: DEBUG nova.compute.manager [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1530.565909] env[62619]: DEBUG nova.network.neutron [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1530.676415] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777602, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075631} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.676907] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1530.677399] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae87c88-17c3-4b3c-91d9-d1d83e16b24e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.700981] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 39adf15c-f77e-4737-aeeb-258887007b9a/39adf15c-f77e-4737-aeeb-258887007b9a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1530.704442] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88835b72-742c-4cc6-830c-1bea76128441 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.726009] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5248409b-7fe9-40d4-73bc-f26ad784665c, 'name': SearchDatastore_Task, 'duration_secs': 0.010235} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.727827] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Waiting for the task: (returnval){ [ 1530.727827] env[62619]: value = "task-1777604" [ 1530.727827] env[62619]: _type = "Task" [ 1530.727827] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.728038] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-963e60a7-5d49-4a1a-8d17-41a4024f11d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.737900] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1530.737900] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525ee61a-ea0a-9215-d7f5-d2fc8a97049c" [ 1530.737900] env[62619]: _type = "Task" [ 1530.737900] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.741507] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777604, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.745470] env[62619]: DEBUG oslo_concurrency.lockutils [req-059ed7ac-ff2f-4114-88cc-c70e71a4224c req-704fb54f-1ddf-4a9d-a5ca-e3d21a410f09 service nova] Releasing lock "refresh_cache-eca829be-d425-4668-9ebd-1247c5ff19d0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.745764] env[62619]: DEBUG nova.compute.manager [req-059ed7ac-ff2f-4114-88cc-c70e71a4224c req-704fb54f-1ddf-4a9d-a5ca-e3d21a410f09 service nova] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Received event network-vif-deleted-88e763dc-8978-45a5-a870-d55441fb392a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1530.752893] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525ee61a-ea0a-9215-d7f5-d2fc8a97049c, 'name': SearchDatastore_Task, 'duration_secs': 0.010943} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.753207] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.753548] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] eca829be-d425-4668-9ebd-1247c5ff19d0/eca829be-d425-4668-9ebd-1247c5ff19d0.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1530.753865] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0c4a4823-7eb2-4b95-bbb8-7336323e31e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.761564] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1530.761564] env[62619]: value = "task-1777605" [ 1530.761564] env[62619]: _type = "Task" [ 1530.761564] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.774311] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777605, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.793888] env[62619]: DEBUG nova.policy [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7de19beaddcd46718c91b9367b64981e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '333f40a8350d4a4586cd2236bc63bef9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1530.983483] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.983483] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.983483] env[62619]: DEBUG nova.network.neutron [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1530.986773] env[62619]: DEBUG nova.compute.utils [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1531.034568] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777603, 'name': PowerOffVM_Task, 'duration_secs': 0.369989} completed successfully. 
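The "Policy check for network:attach_external_network failed" entry above is an oslo.policy decision made against the request's credential dict. A self-contained sketch of the same kind of check; the rule default used here ("role:admin") is an assumption for illustration, not Nova's registered default:

    from oslo_config import cfg
    from oslo_policy import policy

    cfg.CONF([])  # parse an empty command line so the enforcer can read its options
    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault("network:attach_external_network", "role:admin"))

    creds = {"roles": ["member", "reader"],
             "project_id": "333f40a8350d4a4586cd2236bc63bef9"}
    target = {"project_id": creds["project_id"]}

    # Returns False for a member/reader token, which the compute manager
    # records as the failed policy check above.
    print(enforcer.authorize("network:attach_external_network", target, creds))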
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.034568] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1531.034568] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1531.034568] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac61936-24a7-42a2-91a1-3cac54eb559a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.042138] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1531.042469] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b17fda66-64d7-4568-8517-dc471a22d323 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.068028] env[62619]: DEBUG nova.compute.manager [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1531.106567] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance ac03bcf3-61df-4557-8018-0ad54ef30f17 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.106755] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4763e489-5aeb-4dc0-b327-b79a55afdfe3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.106895] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance ed34ae20-a891-45aa-8124-f36f264937f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.107047] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance fb231b38-950e-4c86-bfe5-4c10a304910f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.107220] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4ee81568-ad9a-4ded-b6fe-15503d85968e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1531.107384] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.107515] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance b6aae13f-0711-4421-9d55-de7ece3e4b89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.107631] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.107761] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 80363e16-5dd2-42ad-9ead-25b121d62211 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1531.107879] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance aa4906f1-e801-4df0-819e-8c5fb5930fb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.108059] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance d4230edc-cfda-4b9f-ab42-2f39c699ff03 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
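The DEBUG/WARNING pairs above come from the resource tracker reconciling the instances it actively manages with the consumers that hold Placement allocations against this node; allocations it cannot account for are logged and left untouched ("Skipping heal ..."). A toy illustration of that comparison in plain Python, not Nova's implementation:

    # A few consumer UUIDs from the log, for illustration only.
    managed = {"ac03bcf3-61df-4557-8018-0ad54ef30f17",
               "39adf15c-f77e-4737-aeeb-258887007b9a"}
    allocation_consumers = {"ac03bcf3-61df-4557-8018-0ad54ef30f17",
                            "39adf15c-f77e-4737-aeeb-258887007b9a",
                            "4ee81568-ad9a-4ded-b6fe-15503d85968e"}

    for consumer in sorted(allocation_consumers):
        if consumer in managed:
            print(f"{consumer}: actively managed, allocation kept")
        else:
            # Could be a migration, an evacuation or stale data, so nothing
            # is healed automatically -- only the WARNING is emitted.
            print(f"{consumer}: not managed here, skipping heal")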
[ 1531.108237] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 9014ef05-64d1-4bd6-9f2e-db58003b6520 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.108413] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 40eeb844-7423-4818-8095-81062c7e6392 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1531.108646] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance cef20063-96f0-46cc-9f7d-4436b60216c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.108817] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 060427a2-e724-4c51-879e-675154ae5df2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1531.108978] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance da806d3f-79f0-4188-a2d8-0beeb9dfec1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.109115] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e32cb991-a018-4b55-8cdf-378e212c8434 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.109294] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1531.109421] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 7217d898-54ee-46ed-88fa-959c38e988e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.109617] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance a802534f-1766-4ea9-9188-803ef197d775 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
Skipping heal of allocation because we do not know what to do. [ 1531.109750] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance ca5f5f6b-5303-4af4-adaa-e4aac72a90f8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1531.109873] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 78c7a111-d497-4114-b4f4-07319e6e7df2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1531.110023] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance cd8b8828-79cf-4a7c-b018-b8bd745aaa45 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.110224] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 39adf15c-f77e-4737-aeeb-258887007b9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.110382] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance eca829be-d425-4668-9ebd-1247c5ff19d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.110531] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.110658] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 0272ca2a-e9ff-4af5-8120-278a82d74627 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1531.168180] env[62619]: DEBUG nova.network.neutron [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Successfully created port: f3bed801-3e28-49e7-83d3-60dcdf9a38ea {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1531.238046] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1531.238603] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1531.238944] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleting the datastore file [datastore1] ac03bcf3-61df-4557-8018-0ad54ef30f17 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1531.242172] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a616754e-4b47-4642-8c0b-be12ba4645e8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.245559] env[62619]: DEBUG nova.compute.manager [req-4921a349-05d8-4c85-bd59-70747ffe8707 req-d2cf437e-b218-45c3-9d8a-26f00c8def4b service nova] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Received event network-vif-plugged-b1ace9af-97b6-4d21-bbe4-972a2a1c1e13 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1531.245893] env[62619]: DEBUG oslo_concurrency.lockutils [req-4921a349-05d8-4c85-bd59-70747ffe8707 req-d2cf437e-b218-45c3-9d8a-26f00c8def4b service nova] Acquiring lock "dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.246192] env[62619]: DEBUG oslo_concurrency.lockutils [req-4921a349-05d8-4c85-bd59-70747ffe8707 req-d2cf437e-b218-45c3-9d8a-26f00c8def4b service nova] Lock "dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.246533] env[62619]: DEBUG oslo_concurrency.lockutils [req-4921a349-05d8-4c85-bd59-70747ffe8707 req-d2cf437e-b218-45c3-9d8a-26f00c8def4b service nova] Lock "dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.246731] env[62619]: DEBUG nova.compute.manager [req-4921a349-05d8-4c85-bd59-70747ffe8707 
req-d2cf437e-b218-45c3-9d8a-26f00c8def4b service nova] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] No waiting events found dispatching network-vif-plugged-b1ace9af-97b6-4d21-bbe4-972a2a1c1e13 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1531.246973] env[62619]: WARNING nova.compute.manager [req-4921a349-05d8-4c85-bd59-70747ffe8707 req-d2cf437e-b218-45c3-9d8a-26f00c8def4b service nova] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Received unexpected event network-vif-plugged-b1ace9af-97b6-4d21-bbe4-972a2a1c1e13 for instance with vm_state building and task_state spawning. [ 1531.247201] env[62619]: DEBUG nova.compute.manager [req-4921a349-05d8-4c85-bd59-70747ffe8707 req-d2cf437e-b218-45c3-9d8a-26f00c8def4b service nova] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Received event network-changed-b1ace9af-97b6-4d21-bbe4-972a2a1c1e13 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1531.247418] env[62619]: DEBUG nova.compute.manager [req-4921a349-05d8-4c85-bd59-70747ffe8707 req-d2cf437e-b218-45c3-9d8a-26f00c8def4b service nova] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Refreshing instance network info cache due to event network-changed-b1ace9af-97b6-4d21-bbe4-972a2a1c1e13. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1531.247685] env[62619]: DEBUG oslo_concurrency.lockutils [req-4921a349-05d8-4c85-bd59-70747ffe8707 req-d2cf437e-b218-45c3-9d8a-26f00c8def4b service nova] Acquiring lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.255606] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777604, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.259826] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1531.259826] env[62619]: value = "task-1777607" [ 1531.259826] env[62619]: _type = "Task" [ 1531.259826] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.275879] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777607, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.279190] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777605, 'name': CopyVirtualDisk_Task} progress is 51%. 
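The "No waiting events found" / "Received unexpected event" pair above reflects the compute manager's waiting-events table: a spawner registers interest in an event such as network-vif-plugged-<port> before plugging the VIF, and the Neutron-triggered callback only finds a waiter if that registration happened first. A toy model of that idea, using threading.Event keyed by instance and event tag; this is not Nova's implementation:

    import threading
    from collections import defaultdict

    _waiters = defaultdict(threading.Event)

    def prepare_for_event(instance_uuid, event_tag):
        # Called before the action that will eventually trigger the event.
        return _waiters[(instance_uuid, event_tag)]

    def deliver_event(instance_uuid, event_tag):
        key = (instance_uuid, event_tag)
        if key in _waiters:
            _waiters[key].set()
            return True
        return False  # "No waiting events found" -> logged as unexpected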
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.490773] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "cef20063-96f0-46cc-9f7d-4436b60216c6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.541837] env[62619]: DEBUG nova.network.neutron [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1531.614826] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 79dfeb2b-06d0-45f1-b97e-10fa4f00d282 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1531.736427] env[62619]: DEBUG nova.network.neutron [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance_info_cache with network_info: [{"id": "b1ace9af-97b6-4d21-bbe4-972a2a1c1e13", "address": "fa:16:3e:70:d7:d3", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.184", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1ace9af-97", "ovs_interfaceid": "b1ace9af-97b6-4d21-bbe4-972a2a1c1e13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1531.745368] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777604, 'name': ReconfigVM_Task, 'duration_secs': 0.709595} completed successfully. 
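The instance_info_cache payload above is a plain list of VIF dicts with nested network, subnet and IP structures. A small helper of the sort one might write against that JSON shape (it does not use Nova's NetworkInfo objects):

    def summarize_vifs(network_info):
        """Yield (port id, MAC, fixed IPs, segmentation id) for each VIF dict."""
        for vif in network_info:
            ips = [ip["address"]
                   for subnet in vif["network"]["subnets"]
                   for ip in subnet["ips"]]
            seg = vif.get("details", {}).get("segmentation_id")
            yield vif["id"], vif["address"], ips, seg

    # Applied to the cache entry above, this yields roughly:
    # ("b1ace9af-97b6-4d21-bbe4-972a2a1c1e13", "fa:16:3e:70:d7:d3",
    #  ["192.168.233.184"], 886)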
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.745872] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 39adf15c-f77e-4737-aeeb-258887007b9a/39adf15c-f77e-4737-aeeb-258887007b9a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1531.746777] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5efb4e5c-85a7-48be-8b13-ef9f7336d437 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.755236] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Waiting for the task: (returnval){ [ 1531.755236] env[62619]: value = "task-1777608" [ 1531.755236] env[62619]: _type = "Task" [ 1531.755236] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.776478] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777607, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197012} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.776478] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777608, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.776727] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1531.776896] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1531.777330] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1531.782989] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777605, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575576} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.783654] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] eca829be-d425-4668-9ebd-1247c5ff19d0/eca829be-d425-4668-9ebd-1247c5ff19d0.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1531.783881] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1531.784227] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-00f49fe1-2ea5-42e7-99c8-8a2b909c20ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.792307] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1531.792307] env[62619]: value = "task-1777609" [ 1531.792307] env[62619]: _type = "Task" [ 1531.792307] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.803952] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777609, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.077211] env[62619]: DEBUG nova.compute.manager [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1532.105844] env[62619]: DEBUG nova.virt.hardware [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1532.106107] env[62619]: DEBUG nova.virt.hardware [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1532.106267] env[62619]: DEBUG nova.virt.hardware [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1532.106445] env[62619]: DEBUG nova.virt.hardware [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1532.106590] env[62619]: DEBUG nova.virt.hardware [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1532.106730] env[62619]: DEBUG nova.virt.hardware [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1532.106928] env[62619]: DEBUG nova.virt.hardware [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1532.107098] env[62619]: DEBUG nova.virt.hardware [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1532.107520] env[62619]: DEBUG 
nova.virt.hardware [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1532.107520] env[62619]: DEBUG nova.virt.hardware [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1532.107617] env[62619]: DEBUG nova.virt.hardware [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1532.108424] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6695f5-a8f1-4ff6-9a8b-58266e9e9c6b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.117712] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 0a80942c-eb86-480b-ab7b-33112dd90d28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1532.122157] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd648ac2-b7af-4d40-a8a1-b39e83072597 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.240013] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Releasing lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.240411] env[62619]: DEBUG nova.compute.manager [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Instance network_info: |[{"id": "b1ace9af-97b6-4d21-bbe4-972a2a1c1e13", "address": "fa:16:3e:70:d7:d3", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.184", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tapb1ace9af-97", "ovs_interfaceid": "b1ace9af-97b6-4d21-bbe4-972a2a1c1e13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1532.240745] env[62619]: DEBUG oslo_concurrency.lockutils [req-4921a349-05d8-4c85-bd59-70747ffe8707 req-d2cf437e-b218-45c3-9d8a-26f00c8def4b service nova] Acquired lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.240923] env[62619]: DEBUG nova.network.neutron [req-4921a349-05d8-4c85-bd59-70747ffe8707 req-d2cf437e-b218-45c3-9d8a-26f00c8def4b service nova] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Refreshing network info cache for port b1ace9af-97b6-4d21-bbe4-972a2a1c1e13 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1532.245108] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:d7:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1ace9af-97b6-4d21-bbe4-972a2a1c1e13', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1532.252617] env[62619]: DEBUG oslo.service.loopingcall [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.257481] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1532.258069] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d86448e9-8246-44c0-a646-4ed9f9c8aec8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.282309] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777608, 'name': Rename_Task, 'duration_secs': 0.461576} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.283570] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1532.283795] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1532.283795] env[62619]: value = "task-1777610" [ 1532.283795] env[62619]: _type = "Task" [ 1532.283795] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.287684] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b2ce7df-a1e6-4177-8946-e54db279b845 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.301360] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777610, 'name': CreateVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.305150] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777609, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.281467} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.305418] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Waiting for the task: (returnval){ [ 1532.305418] env[62619]: value = "task-1777611" [ 1532.305418] env[62619]: _type = "Task" [ 1532.305418] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.305642] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1532.306462] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e71bdd-f168-4283-85c6-6ad83df00c41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.335807] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] eca829be-d425-4668-9ebd-1247c5ff19d0/eca829be-d425-4668-9ebd-1247c5ff19d0.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1532.343205] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3d21e0d-826c-4396-8a5b-5bffa459999e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.358848] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777611, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.366788] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1532.366788] env[62619]: value = "task-1777612" [ 1532.366788] env[62619]: _type = "Task" [ 1532.366788] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.376712] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777612, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.557573] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "cef20063-96f0-46cc-9f7d-4436b60216c6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.557929] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "cef20063-96f0-46cc-9f7d-4436b60216c6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.558214] env[62619]: INFO nova.compute.manager [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Attaching volume 29d5a6b8-d9f2-4372-b79f-d8066f83c85d to /dev/sdb [ 1532.612498] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71491360-039c-45f9-94ac-cc241e323fe9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.621294] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b839803-4558-4779-b1a4-5b10c9ebac6f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.625283] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 1f86b805-0fde-4bda-9a94-d440a670e23c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1532.640264] env[62619]: DEBUG nova.virt.block_device [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Updating existing volume attachment record: 33ff2b74-b0b3-42c9-be35-d1e960cb9ac0 {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1532.803187] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777610, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.817303] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777611, 'name': PowerOnVM_Task} progress is 74%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.824147] env[62619]: DEBUG nova.network.neutron [req-4921a349-05d8-4c85-bd59-70747ffe8707 req-d2cf437e-b218-45c3-9d8a-26f00c8def4b service nova] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updated VIF entry in instance network info cache for port b1ace9af-97b6-4d21-bbe4-972a2a1c1e13. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1532.824615] env[62619]: DEBUG nova.network.neutron [req-4921a349-05d8-4c85-bd59-70747ffe8707 req-d2cf437e-b218-45c3-9d8a-26f00c8def4b service nova] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance_info_cache with network_info: [{"id": "b1ace9af-97b6-4d21-bbe4-972a2a1c1e13", "address": "fa:16:3e:70:d7:d3", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.184", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1ace9af-97", "ovs_interfaceid": "b1ace9af-97b6-4d21-bbe4-972a2a1c1e13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.827869] env[62619]: DEBUG nova.virt.hardware [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1532.828106] env[62619]: DEBUG nova.virt.hardware [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 
tempest-ServersAdminTestJSON-1690188522-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1532.828264] env[62619]: DEBUG nova.virt.hardware [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1532.828448] env[62619]: DEBUG nova.virt.hardware [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1532.828593] env[62619]: DEBUG nova.virt.hardware [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1532.828737] env[62619]: DEBUG nova.virt.hardware [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1532.828996] env[62619]: DEBUG nova.virt.hardware [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1532.829260] env[62619]: DEBUG nova.virt.hardware [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1532.829445] env[62619]: DEBUG nova.virt.hardware [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1532.829633] env[62619]: DEBUG nova.virt.hardware [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1532.829859] env[62619]: DEBUG nova.virt.hardware [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1532.830836] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a572260-f4c3-4688-9f73-5821620ff82e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.842295] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f02c81a-9e93-4df8-b64a-415975162cdc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.856819] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:19:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6faf342-2332-4eee-bdde-dafce4f0a856', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1532.864886] env[62619]: DEBUG oslo.service.loopingcall [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.864886] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1532.865328] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a3378fce-df87-40c4-aa25-9e4d6cc8e919 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.888862] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777612, 'name': ReconfigVM_Task, 'duration_secs': 0.465361} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.890171] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Reconfigured VM instance instance-00000031 to attach disk [datastore1] eca829be-d425-4668-9ebd-1247c5ff19d0/eca829be-d425-4668-9ebd-1247c5ff19d0.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1532.890758] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1532.890758] env[62619]: value = "task-1777614" [ 1532.890758] env[62619]: _type = "Task" [ 1532.890758] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.890946] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-612f1c64-d425-4c5e-af26-562d8730c6c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.899065] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1532.899065] env[62619]: value = "task-1777615" [ 1532.899065] env[62619]: _type = "Task" [ 1532.899065] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.905486] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777614, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.910766] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777615, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.128647] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance fab3d689-9e30-4afd-b0cc-49c6d2870c50 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1533.284523] env[62619]: DEBUG nova.network.neutron [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Successfully updated port: f3bed801-3e28-49e7-83d3-60dcdf9a38ea {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1533.306787] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777610, 'name': CreateVM_Task, 'duration_secs': 0.692717} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.307159] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1533.311207] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.311419] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.311760] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1533.312388] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19b71df4-c710-4fec-adfe-9e7f91bdb752 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.318268] env[62619]: DEBUG oslo_vmware.api 
[None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1533.318268] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52682f21-f5b3-409b-254e-f283de89c6f1" [ 1533.318268] env[62619]: _type = "Task" [ 1533.318268] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.322167] env[62619]: DEBUG oslo_vmware.api [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777611, 'name': PowerOnVM_Task, 'duration_secs': 0.899276} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.325515] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1533.325648] env[62619]: INFO nova.compute.manager [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Took 9.53 seconds to spawn the instance on the hypervisor. [ 1533.325859] env[62619]: DEBUG nova.compute.manager [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1533.326885] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364f854a-91f9-4616-8112-38c750129d1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.339887] env[62619]: DEBUG oslo_concurrency.lockutils [req-4921a349-05d8-4c85-bd59-70747ffe8707 req-d2cf437e-b218-45c3-9d8a-26f00c8def4b service nova] Releasing lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.340048] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52682f21-f5b3-409b-254e-f283de89c6f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.408816] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777614, 'name': CreateVM_Task, 'duration_secs': 0.451184} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.409454] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1533.410221] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.414927] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777615, 'name': Rename_Task, 'duration_secs': 0.195854} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.415365] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1533.416453] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ca40ad5-613c-46b1-bc89-2f9a1c1c0e49 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.424232] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1533.424232] env[62619]: value = "task-1777618" [ 1533.424232] env[62619]: _type = "Task" [ 1533.424232] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.434524] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777618, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.461262] env[62619]: DEBUG nova.compute.manager [req-550df080-5438-47c6-9ba4-82d5bb28c6d5 req-cbb15273-8224-4b5d-bb05-5d26d6439905 service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Received event network-vif-plugged-f3bed801-3e28-49e7-83d3-60dcdf9a38ea {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1533.461621] env[62619]: DEBUG oslo_concurrency.lockutils [req-550df080-5438-47c6-9ba4-82d5bb28c6d5 req-cbb15273-8224-4b5d-bb05-5d26d6439905 service nova] Acquiring lock "0272ca2a-e9ff-4af5-8120-278a82d74627-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.461885] env[62619]: DEBUG oslo_concurrency.lockutils [req-550df080-5438-47c6-9ba4-82d5bb28c6d5 req-cbb15273-8224-4b5d-bb05-5d26d6439905 service nova] Lock "0272ca2a-e9ff-4af5-8120-278a82d74627-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.462095] env[62619]: DEBUG oslo_concurrency.lockutils [req-550df080-5438-47c6-9ba4-82d5bb28c6d5 req-cbb15273-8224-4b5d-bb05-5d26d6439905 service nova] Lock "0272ca2a-e9ff-4af5-8120-278a82d74627-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.462336] env[62619]: DEBUG nova.compute.manager [req-550df080-5438-47c6-9ba4-82d5bb28c6d5 req-cbb15273-8224-4b5d-bb05-5d26d6439905 service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] No waiting events found dispatching network-vif-plugged-f3bed801-3e28-49e7-83d3-60dcdf9a38ea {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1533.462575] env[62619]: WARNING nova.compute.manager [req-550df080-5438-47c6-9ba4-82d5bb28c6d5 req-cbb15273-8224-4b5d-bb05-5d26d6439905 service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Received unexpected event network-vif-plugged-f3bed801-3e28-49e7-83d3-60dcdf9a38ea for instance with vm_state building and task_state spawning. [ 1533.634511] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance ae37cae9-c82e-4775-8a8f-6bbf9108b0bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1533.788107] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "refresh_cache-0272ca2a-e9ff-4af5-8120-278a82d74627" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.788107] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquired lock "refresh_cache-0272ca2a-e9ff-4af5-8120-278a82d74627" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.788107] env[62619]: DEBUG nova.network.neutron [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1533.834173] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52682f21-f5b3-409b-254e-f283de89c6f1, 'name': SearchDatastore_Task, 'duration_secs': 0.01764} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.834660] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.834898] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1533.835147] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.835300] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.835484] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 
tempest-MigrationsAdminTest-292515410-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1533.835793] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.836104] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1533.836331] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0da4194b-48e4-4c1e-90f0-dea4c45f4163 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.838205] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9591d62e-2aed-4a29-a30e-bb65592040b9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.848032] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1533.848032] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525d4b14-e0a0-22f2-86a7-b2e2d9f03d34" [ 1533.848032] env[62619]: _type = "Task" [ 1533.848032] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.851527] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1533.851776] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1533.853146] env[62619]: INFO nova.compute.manager [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Took 41.72 seconds to build instance. 
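The "Acquiring lock ... by ...", "Lock ... acquired ... :: waited", and "... released ... :: held" entries above are emitted from oslo.concurrency's lockutils wrapper (lockutils.py:402/407/421 in the traces). A minimal sketch of that locking pattern, for orientation only: the lock names are copied from the log, but the decorated function and its body are placeholders, not Nova's actual ComputeManager code.

    # Sketch of the oslo.concurrency pattern behind the lock DEBUG lines above.
    # Lock names mirror the log; the function bodies are placeholders.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('cef20063-96f0-46cc-9f7d-4436b60216c6')
    def do_attach_volume():
        # Runs while holding the per-instance lock; lockutils logs the acquire
        # (with time waited) and the release (with time held).
        pass

    # The same helper is also usable as a context manager, e.g. around the
    # network info cache refresh seen above:
    with lockutils.lock('refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5'):
        pass

    do_attach_volume()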
[ 1533.857035] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f54ee9b6-aae9-4d3e-9d27-e08c4aa4252e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.865829] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525d4b14-e0a0-22f2-86a7-b2e2d9f03d34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.867665] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1533.867665] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526a68b2-b0f2-647c-5dd8-9066e19fc617" [ 1533.867665] env[62619]: _type = "Task" [ 1533.867665] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.876993] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526a68b2-b0f2-647c-5dd8-9066e19fc617, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.935795] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777618, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.135617] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance b1c3c213-599d-4cab-8224-d87467d774c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1534.155233] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.155551] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.328070] env[62619]: DEBUG nova.network.neutron [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1534.361997] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca727633-7cc6-467e-ba08-e738ed735a5d tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Lock "39adf15c-f77e-4737-aeeb-258887007b9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.273s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.362273] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525d4b14-e0a0-22f2-86a7-b2e2d9f03d34, 'name': SearchDatastore_Task, 'duration_secs': 0.02225} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.363579] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.363847] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1534.364050] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.383118] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526a68b2-b0f2-647c-5dd8-9066e19fc617, 'name': SearchDatastore_Task, 'duration_secs': 0.024598} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.384045] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b7b8e06-6355-467a-b32c-ec16b166b8ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.390068] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1534.390068] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521e78e4-643c-edfd-c8b3-ad1d6ad52b5e" [ 1534.390068] env[62619]: _type = "Task" [ 1534.390068] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.401569] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521e78e4-643c-edfd-c8b3-ad1d6ad52b5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.436707] env[62619]: DEBUG oslo_vmware.api [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777618, 'name': PowerOnVM_Task, 'duration_secs': 0.531509} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.436967] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1534.437179] env[62619]: INFO nova.compute.manager [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Took 7.64 seconds to spawn the instance on the hypervisor. [ 1534.437353] env[62619]: DEBUG nova.compute.manager [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1534.438119] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ae6618-9946-4491-b9b4-ae0111df0823 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.548519] env[62619]: DEBUG nova.network.neutron [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Updating instance_info_cache with network_info: [{"id": "f3bed801-3e28-49e7-83d3-60dcdf9a38ea", "address": "fa:16:3e:2b:af:6a", "network": {"id": "d28ddb7a-9db3-465f-8343-8f23b12b5183", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2033557069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "333f40a8350d4a4586cd2236bc63bef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3bed801-3e", "ovs_interfaceid": "f3bed801-3e28-49e7-83d3-60dcdf9a38ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.638613] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1534.638824] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1534.638978] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4032MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1534.866776] env[62619]: DEBUG nova.compute.manager [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1534.900688] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521e78e4-643c-edfd-c8b3-ad1d6ad52b5e, 'name': SearchDatastore_Task, 'duration_secs': 0.019217} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.900961] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.901234] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5/dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1534.903809] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.904011] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1534.904251] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c7fd655-6843-49bf-8880-a0833a8303fe {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.906636] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d456e1d-f42d-41cb-99d3-037a3e7a52e8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.914822] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1534.914822] env[62619]: value = "task-1777619" [ 1534.914822] env[62619]: _type = "Task" [ 1534.914822] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.921663] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1534.921849] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1534.923368] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1986cf54-91ea-4f80-84e7-c9eae6e60f3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.929088] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777619, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.932712] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1534.932712] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527f3307-7ba6-e7b2-e424-e4c696c25b13" [ 1534.932712] env[62619]: _type = "Task" [ 1534.932712] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.944407] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527f3307-7ba6-e7b2-e424-e4c696c25b13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.959012] env[62619]: INFO nova.compute.manager [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Took 40.29 seconds to build instance. 
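The records above follow the driver's standard pattern for long-running vCenter operations: a SOAP method that returns a Task managed object is invoked through the shared VMwareAPISession, and oslo.vmware then polls it (the "progress is N%" lines from api.py:434) until it reports completion (api.py:444). Below is a minimal sketch of that invoke-then-wait pattern; the host, credentials, datacenter reference and datastore paths are placeholders for illustration, not values taken from this run.

# Illustrative sketch only: the invoke-then-wait pattern behind the
# "Waiting for the task ... to complete" / "progress is N%" /
# "completed successfully" records above. VC_HOST, VC_USER, VC_PASS,
# dc_ref and the datastore paths are placeholders, not from this log.
from oslo_vmware import api

session = api.VMwareAPISession(
    host='VC_HOST',               # placeholder vCenter hostname
    server_username='VC_USER',    # placeholder credentials
    server_password='VC_PASS',
    api_retry_count=10,           # retry transient SOAP faults
    task_poll_interval=0.5)       # seconds between progress polls

# Datacenter managed-object reference owning datastore1; the lookup
# is omitted here.
dc_ref = ...

vdm = session.vim.service_content.virtualDiskManager

# Start a CopyVirtualDisk_Task, analogous to copying the cached image
# VMDK from devstack-image-cache_base into an instance directory.
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', vdm,
    sourceName='[datastore1] devstack-image-cache_base/IMAGE/IMAGE.vmdk',
    sourceDatacenter=dc_ref,
    destName='[datastore1] INSTANCE/INSTANCE.vmdk',
    destDatacenter=dc_ref)

# Blocks while oslo.vmware polls the task (the "progress is N%" lines)
# and returns the task info once vCenter reports success.
task_info = session.wait_for_task(task)

In the driver itself this sequence is wrapped by nova.virt.vmwareapi.vm_util.copy_virtual_disk (vm_util.py:1423 in the records above); the sketch only illustrates the underlying session calls.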
[ 1535.055614] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Releasing lock "refresh_cache-0272ca2a-e9ff-4af5-8120-278a82d74627" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.056231] env[62619]: DEBUG nova.compute.manager [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Instance network_info: |[{"id": "f3bed801-3e28-49e7-83d3-60dcdf9a38ea", "address": "fa:16:3e:2b:af:6a", "network": {"id": "d28ddb7a-9db3-465f-8343-8f23b12b5183", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2033557069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "333f40a8350d4a4586cd2236bc63bef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3bed801-3e", "ovs_interfaceid": "f3bed801-3e28-49e7-83d3-60dcdf9a38ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1535.056231] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:af:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7ab8d568-adb0-4f3b-b6cc-68413e6546ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3bed801-3e28-49e7-83d3-60dcdf9a38ea', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1535.064325] env[62619]: DEBUG oslo.service.loopingcall [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1535.067112] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1535.067970] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-836f8e67-a083-4cf8-adbe-f2c05403bbce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.091291] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1535.091291] env[62619]: value = "task-1777621" [ 1535.091291] env[62619]: _type = "Task" [ 1535.091291] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.101177] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777621, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.134335] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a40f141-1e5c-4cd2-9d5f-8daa2cb3b4c2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.143316] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfdb35b-6136-456d-befc-444755f1ccdb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.180636] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Acquiring lock "39adf15c-f77e-4737-aeeb-258887007b9a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.181599] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Lock "39adf15c-f77e-4737-aeeb-258887007b9a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.181599] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Acquiring lock "39adf15c-f77e-4737-aeeb-258887007b9a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.181599] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Lock "39adf15c-f77e-4737-aeeb-258887007b9a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.181852] env[62619]: DEBUG oslo_concurrency.lockutils [None 
req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Lock "39adf15c-f77e-4737-aeeb-258887007b9a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.184113] env[62619]: INFO nova.compute.manager [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Terminating instance [ 1535.186336] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c51b06b2-d7f8-4ebb-b786-653b9280173c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.197078] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860614f0-709a-40a1-ad10-0430707a9cf0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.213990] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1535.388116] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.426019] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777619, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.447902] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527f3307-7ba6-e7b2-e424-e4c696c25b13, 'name': SearchDatastore_Task, 'duration_secs': 0.018284} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.448789] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ba78aa8-c0c1-411e-ad0f-55d06ad7db35 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.455435] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1535.455435] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5268424c-1ed6-a040-b4f8-7dcdf63791cb" [ 1535.455435] env[62619]: _type = "Task" [ 1535.455435] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.461636] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a05d2844-830f-42e5-95b9-97f667fe193d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "eca829be-d425-4668-9ebd-1247c5ff19d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.809s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.467752] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5268424c-1ed6-a040-b4f8-7dcdf63791cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.487666] env[62619]: DEBUG nova.compute.manager [req-5f30128c-0ade-4ba4-9290-a20aecbbfeb8 req-85fde1ec-b97c-4cc2-9737-06d1b47aba68 service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Received event network-changed-f3bed801-3e28-49e7-83d3-60dcdf9a38ea {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1535.488050] env[62619]: DEBUG nova.compute.manager [req-5f30128c-0ade-4ba4-9290-a20aecbbfeb8 req-85fde1ec-b97c-4cc2-9737-06d1b47aba68 service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Refreshing instance network info cache due to event network-changed-f3bed801-3e28-49e7-83d3-60dcdf9a38ea. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1535.488154] env[62619]: DEBUG oslo_concurrency.lockutils [req-5f30128c-0ade-4ba4-9290-a20aecbbfeb8 req-85fde1ec-b97c-4cc2-9737-06d1b47aba68 service nova] Acquiring lock "refresh_cache-0272ca2a-e9ff-4af5-8120-278a82d74627" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1535.488261] env[62619]: DEBUG oslo_concurrency.lockutils [req-5f30128c-0ade-4ba4-9290-a20aecbbfeb8 req-85fde1ec-b97c-4cc2-9737-06d1b47aba68 service nova] Acquired lock "refresh_cache-0272ca2a-e9ff-4af5-8120-278a82d74627" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.488427] env[62619]: DEBUG nova.network.neutron [req-5f30128c-0ade-4ba4-9290-a20aecbbfeb8 req-85fde1ec-b97c-4cc2-9737-06d1b47aba68 service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Refreshing network info cache for port f3bed801-3e28-49e7-83d3-60dcdf9a38ea {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1535.602425] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777621, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.692281] env[62619]: DEBUG nova.compute.manager [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1535.692716] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1535.693607] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b34a5f6-3307-4b5c-9f72-db073ce3b28c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.702888] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1535.703231] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-815d3402-28d1-4694-97d8-b68c6d6a8811 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.711922] env[62619]: DEBUG oslo_vmware.api [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Waiting for the task: (returnval){ [ 1535.711922] env[62619]: value = "task-1777622" [ 1535.711922] env[62619]: _type = "Task" [ 1535.711922] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.717991] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1535.726093] env[62619]: DEBUG oslo_vmware.api [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777622, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.925979] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777619, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581101} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.926601] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5/dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1535.926601] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1535.926884] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ebb12707-00cf-45fd-8b35-2692f6cb9934 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.934125] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1535.934125] env[62619]: value = "task-1777623" [ 1535.934125] env[62619]: _type = "Task" [ 1535.934125] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.944469] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777623, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.965445] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5268424c-1ed6-a040-b4f8-7dcdf63791cb, 'name': SearchDatastore_Task, 'duration_secs': 0.021412} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.965705] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.965969] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ac03bcf3-61df-4557-8018-0ad54ef30f17/ac03bcf3-61df-4557-8018-0ad54ef30f17.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1535.966234] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51ce3923-911f-4071-bf60-4b073c81094e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.968607] env[62619]: DEBUG nova.compute.manager [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1535.977712] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1535.977712] env[62619]: value = "task-1777624" [ 1535.977712] env[62619]: _type = "Task" [ 1535.977712] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.986665] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777624, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.106020] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777621, 'name': CreateVM_Task, 'duration_secs': 0.849905} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.106020] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1536.106020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1536.106020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1536.106020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1536.106020] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e1940d8-91e3-4116-ba3b-2fbd54d1fb66 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.113128] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1536.113128] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52052296-3ed3-3cf2-164c-5411e095684e" [ 1536.113128] env[62619]: _type = "Task" [ 1536.113128] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.121230] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52052296-3ed3-3cf2-164c-5411e095684e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.226472] env[62619]: DEBUG oslo_vmware.api [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777622, 'name': PowerOffVM_Task, 'duration_secs': 0.348571} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.226472] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1536.226472] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1536.226472] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d2e1bd7-d52c-47bc-bde0-09d461d99d0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.227752] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1536.227752] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.178s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.227977] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.816s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.229513] env[62619]: INFO nova.compute.claims [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1536.348986] env[62619]: DEBUG nova.network.neutron [req-5f30128c-0ade-4ba4-9290-a20aecbbfeb8 req-85fde1ec-b97c-4cc2-9737-06d1b47aba68 service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Updated VIF entry in instance network info cache for port f3bed801-3e28-49e7-83d3-60dcdf9a38ea. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1536.349423] env[62619]: DEBUG nova.network.neutron [req-5f30128c-0ade-4ba4-9290-a20aecbbfeb8 req-85fde1ec-b97c-4cc2-9737-06d1b47aba68 service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Updating instance_info_cache with network_info: [{"id": "f3bed801-3e28-49e7-83d3-60dcdf9a38ea", "address": "fa:16:3e:2b:af:6a", "network": {"id": "d28ddb7a-9db3-465f-8343-8f23b12b5183", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2033557069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "333f40a8350d4a4586cd2236bc63bef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3bed801-3e", "ovs_interfaceid": "f3bed801-3e28-49e7-83d3-60dcdf9a38ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1536.383838] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1536.384110] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1536.384362] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Deleting the datastore file [datastore1] 39adf15c-f77e-4737-aeeb-258887007b9a {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1536.384639] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35e9c50a-e39e-4f68-bdb6-b029a4242fe9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.392769] env[62619]: DEBUG oslo_vmware.api [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Waiting for the task: (returnval){ [ 1536.392769] env[62619]: value = "task-1777626" [ 1536.392769] env[62619]: _type = "Task" [ 1536.392769] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.405267] env[62619]: DEBUG oslo_vmware.api [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777626, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.444388] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777623, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14373} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.444657] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1536.445524] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16de9e75-82a3-4146-9574-c7b7f44a548f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.469547] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5/dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1536.469889] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a48f8de2-f237-44c2-90d8-22e96e7944d0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.496519] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1536.496519] env[62619]: value = "task-1777627" [ 1536.496519] env[62619]: _type = "Task" [ 1536.496519] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.499793] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777624, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.509208] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777627, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.510189] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.625693] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52052296-3ed3-3cf2-164c-5411e095684e, 'name': SearchDatastore_Task, 'duration_secs': 0.045224} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.626015] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1536.626265] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1536.626499] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1536.626644] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1536.626814] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1536.627101] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-37b727f0-7061-484f-98a3-c25b95477232 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.652216] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1536.652434] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1536.654218] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c680e73-2dad-418b-9daf-3452ccf39f01 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.669317] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1536.669317] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fce97a-3942-3437-f200-912171fc06ea" [ 1536.669317] env[62619]: _type = "Task" [ 1536.669317] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.679368] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fce97a-3942-3437-f200-912171fc06ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.758910] env[62619]: DEBUG nova.compute.manager [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1536.759899] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6919188a-5dac-41ae-a3f5-6f1d3d16c357 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.852666] env[62619]: DEBUG oslo_concurrency.lockutils [req-5f30128c-0ade-4ba4-9290-a20aecbbfeb8 req-85fde1ec-b97c-4cc2-9737-06d1b47aba68 service nova] Releasing lock "refresh_cache-0272ca2a-e9ff-4af5-8120-278a82d74627" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1536.904226] env[62619]: DEBUG oslo_vmware.api [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777626, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.997502] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777624, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.009526] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777627, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.184023] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fce97a-3942-3437-f200-912171fc06ea, 'name': SearchDatastore_Task, 'duration_secs': 0.081924} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.184023] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b005c5a-51f8-4185-8664-a3fea9148073 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.189451] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1537.189451] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5251d6ee-8f75-f42c-adeb-16f650893ce4" [ 1537.189451] env[62619]: _type = "Task" [ 1537.189451] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.202648] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5251d6ee-8f75-f42c-adeb-16f650893ce4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.211752] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Volume attach. 
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1537.211897] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369019', 'volume_id': '29d5a6b8-d9f2-4372-b79f-d8066f83c85d', 'name': 'volume-29d5a6b8-d9f2-4372-b79f-d8066f83c85d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cef20063-96f0-46cc-9f7d-4436b60216c6', 'attached_at': '', 'detached_at': '', 'volume_id': '29d5a6b8-d9f2-4372-b79f-d8066f83c85d', 'serial': '29d5a6b8-d9f2-4372-b79f-d8066f83c85d'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1537.212819] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b5731f-4607-4362-b1f8-92651f3a59a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.231645] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6f0609-d2c1-4aae-afaa-0c6221100cae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.261146] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] volume-29d5a6b8-d9f2-4372-b79f-d8066f83c85d/volume-29d5a6b8-d9f2-4372-b79f-d8066f83c85d.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1537.262134] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f47f3f5-680e-4bcc-9a3a-f6e3782217c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.277451] env[62619]: INFO nova.compute.manager [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] instance snapshotting [ 1537.278784] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a045988-b0c5-4151-829b-231703d2526a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.300775] env[62619]: DEBUG oslo_vmware.api [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1537.300775] env[62619]: value = "task-1777628" [ 1537.300775] env[62619]: _type = "Task" [ 1537.300775] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.304136] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb7bd61-77c4-4832-906f-9b9269d91071 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.313296] env[62619]: DEBUG oslo_vmware.api [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777628, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.407077] env[62619]: DEBUG oslo_vmware.api [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Task: {'id': task-1777626, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.711493} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.407077] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1537.407077] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1537.407077] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1537.407450] env[62619]: INFO nova.compute.manager [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Took 1.71 seconds to destroy the instance on the hypervisor. [ 1537.407450] env[62619]: DEBUG oslo.service.loopingcall [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1537.407668] env[62619]: DEBUG nova.compute.manager [-] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1537.407781] env[62619]: DEBUG nova.network.neutron [-] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1537.498158] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777624, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.101432} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.498636] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ac03bcf3-61df-4557-8018-0ad54ef30f17/ac03bcf3-61df-4557-8018-0ad54ef30f17.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1537.498636] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1537.498868] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-98bc7279-f979-4cc6-8d71-d95dbb4d55dd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.513162] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1537.513162] env[62619]: value = "task-1777629" [ 1537.513162] env[62619]: _type = "Task" [ 1537.513162] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.513493] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777627, 'name': ReconfigVM_Task, 'duration_secs': 0.911865} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.513753] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Reconfigured VM instance instance-00000032 to attach disk [datastore1] dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5/dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1537.517611] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09aa44ea-8001-4ac1-8977-5765e38a1b69 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.526544] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777629, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.528110] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1537.528110] env[62619]: value = "task-1777630" [ 1537.528110] env[62619]: _type = "Task" [ 1537.528110] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.539399] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777630, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.704310] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5251d6ee-8f75-f42c-adeb-16f650893ce4, 'name': SearchDatastore_Task, 'duration_secs': 0.014926} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.707127] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1537.707651] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 0272ca2a-e9ff-4af5-8120-278a82d74627/0272ca2a-e9ff-4af5-8120-278a82d74627.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1537.707860] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ee8504e-7697-4215-a954-993a9c020432 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.715106] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1537.715106] env[62619]: value = "task-1777631" [ 1537.715106] env[62619]: _type = "Task" [ 1537.715106] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.727404] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777631, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.776870] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbc4a86-0c7c-443c-b4d7-a74331c0cd01 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.786175] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04d7aa9-15bc-4a76-ad2c-53be95cf6476 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.824216] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4a492f-c421-43de-ad67-6ab7fb744569 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.828462] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1537.829292] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-931e80ec-eb7e-433c-8cd2-bd06d7d2a7dd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.840247] env[62619]: DEBUG oslo_vmware.api [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777628, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.843111] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1537.843111] env[62619]: value = "task-1777632" [ 1537.843111] env[62619]: _type = "Task" [ 1537.843111] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.844768] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55ee591-a391-4885-8a79-8003970e0a75 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.856482] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777632, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.864214] env[62619]: DEBUG nova.compute.provider_tree [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1537.911208] env[62619]: DEBUG nova.compute.manager [req-195072ac-34bb-47d3-8b64-1036c2930d4f req-359a9903-6ba4-49d9-afcb-1a042afed1d0 service nova] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Received event network-vif-deleted-32c0a217-0471-4782-8add-8ba1f15d5c75 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1537.911670] env[62619]: INFO nova.compute.manager [req-195072ac-34bb-47d3-8b64-1036c2930d4f req-359a9903-6ba4-49d9-afcb-1a042afed1d0 service nova] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Neutron deleted interface 32c0a217-0471-4782-8add-8ba1f15d5c75; detaching it from the instance and deleting it from the info cache [ 1537.912097] env[62619]: DEBUG nova.network.neutron [req-195072ac-34bb-47d3-8b64-1036c2930d4f req-359a9903-6ba4-49d9-afcb-1a042afed1d0 service nova] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.027027] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777629, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140863} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.030018] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1538.030018] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c782da-fac0-4f98-8671-d70e9686eebf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.042404] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777630, 'name': Rename_Task, 'duration_secs': 0.290088} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.054075] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1538.065969] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] ac03bcf3-61df-4557-8018-0ad54ef30f17/ac03bcf3-61df-4557-8018-0ad54ef30f17.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1538.067322] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb401d96-f546-4b38-bbc0-3831b637760c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.068320] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8aef93f4-486b-4844-a15e-68eb71256370 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.090704] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1538.090704] env[62619]: value = "task-1777634" [ 1538.090704] env[62619]: _type = "Task" [ 1538.090704] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.092259] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1538.092259] env[62619]: value = "task-1777633" [ 1538.092259] env[62619]: _type = "Task" [ 1538.092259] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.106025] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777634, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.110505] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777633, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.228167] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777631, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.329540] env[62619]: DEBUG nova.network.neutron [-] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.329540] env[62619]: DEBUG oslo_vmware.api [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777628, 'name': ReconfigVM_Task, 'duration_secs': 0.704717} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.329774] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Reconfigured VM instance instance-00000025 to attach disk [datastore1] volume-29d5a6b8-d9f2-4372-b79f-d8066f83c85d/volume-29d5a6b8-d9f2-4372-b79f-d8066f83c85d.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1538.334829] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8fa38e9-df67-48f0-b26b-caeb055b29a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.353906] env[62619]: DEBUG oslo_vmware.api [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1538.353906] env[62619]: value = "task-1777635" [ 1538.353906] env[62619]: _type = "Task" [ 1538.353906] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.361879] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777632, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.365995] env[62619]: DEBUG oslo_vmware.api [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777635, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.388266] env[62619]: ERROR nova.scheduler.client.report [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [req-da2a7087-3e6f-48e2-b07c-08692fb94afb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-da2a7087-3e6f-48e2-b07c-08692fb94afb"}]} [ 1538.410987] env[62619]: DEBUG nova.scheduler.client.report [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1538.415820] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-53f318c8-4d6b-44e9-b1e9-0b9b6f6daf34 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.423736] env[62619]: DEBUG nova.scheduler.client.report [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1538.423965] env[62619]: DEBUG nova.compute.provider_tree [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1538.429477] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468481a8-18dd-42b9-be32-7266038b0251 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.440942] env[62619]: DEBUG nova.scheduler.client.report [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1538.471525] env[62619]: DEBUG nova.compute.manager [req-195072ac-34bb-47d3-8b64-1036c2930d4f req-359a9903-6ba4-49d9-afcb-1a042afed1d0 service nova] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Detach interface failed, port_id=32c0a217-0471-4782-8add-8ba1f15d5c75, reason: Instance 39adf15c-f77e-4737-aeeb-258887007b9a could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1538.472694] env[62619]: DEBUG nova.scheduler.client.report [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1538.602293] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777634, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.608587] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777633, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.729248] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777631, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597033} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.729511] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 0272ca2a-e9ff-4af5-8120-278a82d74627/0272ca2a-e9ff-4af5-8120-278a82d74627.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1538.729729] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1538.730038] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4388d0ba-4a05-482f-b48e-00551b51c232 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.739623] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1538.739623] env[62619]: value = "task-1777636" [ 1538.739623] env[62619]: _type = "Task" [ 1538.739623] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.751145] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777636, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.836804] env[62619]: INFO nova.compute.manager [-] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Took 1.43 seconds to deallocate network for instance. [ 1538.863659] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777632, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.868288] env[62619]: DEBUG oslo_vmware.api [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777635, 'name': ReconfigVM_Task, 'duration_secs': 0.204687} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.868602] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369019', 'volume_id': '29d5a6b8-d9f2-4372-b79f-d8066f83c85d', 'name': 'volume-29d5a6b8-d9f2-4372-b79f-d8066f83c85d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cef20063-96f0-46cc-9f7d-4436b60216c6', 'attached_at': '', 'detached_at': '', 'volume_id': '29d5a6b8-d9f2-4372-b79f-d8066f83c85d', 'serial': '29d5a6b8-d9f2-4372-b79f-d8066f83c85d'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1538.872033] env[62619]: DEBUG nova.compute.manager [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1538.872883] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342dc8af-bb15-486f-9c86-a97e32381cd0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.935319] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d613ae-0352-469d-a73f-7dae83cec914 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.954616] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7160bb49-c40c-406b-af58-dc17e62c6ae7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.990574] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d3da4fbe-8807-4be2-bf3a-7c7d9a7f9bf8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.998355] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936007e6-88f8-4e0c-a571-3a08b18c2b5f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.012624] env[62619]: DEBUG nova.compute.provider_tree [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1539.105546] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777634, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.108546] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777633, 'name': ReconfigVM_Task, 'duration_secs': 0.598164} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.108799] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Reconfigured VM instance instance-00000006 to attach disk [datastore1] ac03bcf3-61df-4557-8018-0ad54ef30f17/ac03bcf3-61df-4557-8018-0ad54ef30f17.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1539.109407] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3319f86-b190-4fc2-bcf2-48b7ff5f94da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.115957] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1539.115957] env[62619]: value = "task-1777637" [ 1539.115957] env[62619]: _type = "Task" [ 1539.115957] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.124820] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777637, 'name': Rename_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.251708] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777636, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088597} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.252074] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1539.252885] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7479cbe5-2424-4aae-8d6b-e5482d2d8d3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.279196] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 0272ca2a-e9ff-4af5-8120-278a82d74627/0272ca2a-e9ff-4af5-8120-278a82d74627.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1539.279545] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24614218-ab2c-4a94-bab4-929f99176cfb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.304265] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1539.304265] env[62619]: value = "task-1777638" [ 1539.304265] env[62619]: _type = "Task" [ 1539.304265] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.318522] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777638, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.343978] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.363932] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777632, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.388430] env[62619]: INFO nova.compute.manager [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] instance snapshotting [ 1539.388775] env[62619]: DEBUG nova.objects.instance [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'flavor' on Instance uuid da806d3f-79f0-4188-a2d8-0beeb9dfec1a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1539.555759] env[62619]: DEBUG nova.scheduler.client.report [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 75 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1539.556123] env[62619]: DEBUG nova.compute.provider_tree [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 75 to 76 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1539.556366] env[62619]: DEBUG nova.compute.provider_tree [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1539.609123] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777634, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.636660] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777637, 'name': Rename_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.814394] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777638, 'name': ReconfigVM_Task, 'duration_secs': 0.371556} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.814756] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 0272ca2a-e9ff-4af5-8120-278a82d74627/0272ca2a-e9ff-4af5-8120-278a82d74627.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1539.815397] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6f4971c-319e-4a19-b604-c614a19d3ae6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.822498] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1539.822498] env[62619]: value = "task-1777639" [ 1539.822498] env[62619]: _type = "Task" [ 1539.822498] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.832251] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777639, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.859430] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777632, 'name': CreateSnapshot_Task, 'duration_secs': 1.774343} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.859564] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1539.860337] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318b7c85-704f-4006-ac9b-684505dd7ea6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.894746] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcca97c3-216e-45b3-bafa-22307ce1d483 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.915962] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2dbd6a3-ec28-416b-b1d0-b6b7957ca924 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.930209] env[62619]: DEBUG nova.objects.instance [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lazy-loading 'flavor' on Instance uuid cef20063-96f0-46cc-9f7d-4436b60216c6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1540.065445] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.837s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.066085] env[62619]: DEBUG nova.compute.manager [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1540.068957] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.762s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.070496] env[62619]: INFO nova.compute.claims [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1540.104287] env[62619]: DEBUG oslo_vmware.api [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777634, 'name': PowerOnVM_Task, 'duration_secs': 1.905135} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.105190] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1540.105400] env[62619]: INFO nova.compute.manager [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Took 10.71 seconds to spawn the instance on the hypervisor. [ 1540.105579] env[62619]: DEBUG nova.compute.manager [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1540.106449] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1acd1789-4c67-4463-a0bf-d6f812131743 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.128067] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777637, 'name': Rename_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.334754] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777639, 'name': Rename_Task, 'duration_secs': 0.176754} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.335025] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1540.335258] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bae37505-fe17-419c-b63f-23a325b25f48 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.341350] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1540.341350] env[62619]: value = "task-1777640" [ 1540.341350] env[62619]: _type = "Task" [ 1540.341350] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.348577] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777640, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.379559] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1540.379559] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8308785b-c75a-4934-8d15-122845925122 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.390295] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1540.390295] env[62619]: value = "task-1777641" [ 1540.390295] env[62619]: _type = "Task" [ 1540.390295] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.399098] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777641, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.433430] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1540.435336] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-de3409ce-f2e6-4b45-8a0a-560677bd1a68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.437599] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c20b667d-8bd7-4735-b5e8-8544a1b297b6 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "cef20063-96f0-46cc-9f7d-4436b60216c6" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.880s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.446485] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1540.446485] env[62619]: value = "task-1777642" [ 1540.446485] env[62619]: _type = "Task" [ 1540.446485] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.457811] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777642, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.577243] env[62619]: DEBUG nova.compute.utils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1540.578842] env[62619]: DEBUG nova.compute.manager [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1540.579062] env[62619]: DEBUG nova.network.neutron [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1540.627093] env[62619]: INFO nova.compute.manager [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Took 42.33 seconds to build instance. [ 1540.635188] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777637, 'name': Rename_Task, 'duration_secs': 1.304006} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.635541] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1540.635855] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-646f461e-eb7b-444d-a1b1-f1f837f054e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.642131] env[62619]: DEBUG nova.policy [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54b468158fa84d5b8e6ec81f380e12c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3015193883ab4a67a9c084424c40806c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1540.646826] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1540.646826] env[62619]: value = "task-1777643" [ 1540.646826] env[62619]: _type = "Task" [ 1540.646826] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.671217] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777643, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.854056] env[62619]: DEBUG oslo_vmware.api [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777640, 'name': PowerOnVM_Task, 'duration_secs': 0.50914} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.854371] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1540.854618] env[62619]: INFO nova.compute.manager [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Took 8.78 seconds to spawn the instance on the hypervisor. [ 1540.854819] env[62619]: DEBUG nova.compute.manager [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1540.855708] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f18f68-d17f-4334-9368-f18e6997bc4c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.901844] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777641, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.960486] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777642, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.083331] env[62619]: DEBUG nova.compute.manager [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1541.117261] env[62619]: DEBUG nova.network.neutron [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Successfully created port: 95751072-9868-4775-a7ca-205205689a74 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1541.129017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-03973889-b223-4663-850a-19c2b7208a60 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.382s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1541.163614] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777643, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.383026] env[62619]: INFO nova.compute.manager [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Took 42.72 seconds to build instance. [ 1541.403139] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777641, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.408659] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "cef20063-96f0-46cc-9f7d-4436b60216c6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.408913] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "cef20063-96f0-46cc-9f7d-4436b60216c6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1541.458959] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777642, 'name': CreateSnapshot_Task, 'duration_secs': 0.6279} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.461626] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1541.462593] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4833f307-f829-4b66-aded-32416e409845 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.618962] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96db1a1-9e5b-4729-bb70-7fa4c02b845d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.628116] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac00d982-3d7a-4a7d-8bb6-72e6010e2c3f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.663486] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c621748-65b0-496e-9edd-c1ebdce84e12 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.674467] env[62619]: DEBUG oslo_vmware.api [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777643, 'name': PowerOnVM_Task, 'duration_secs': 0.718489} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.675154] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1541.675302] env[62619]: DEBUG nova.compute.manager [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1541.678579] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f78f4db-bbfc-41b5-b7bd-d0466e7b8a02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.681073] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ea4374-a677-4b52-9ff2-562f5ac1727f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.697013] env[62619]: DEBUG nova.compute.provider_tree [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1541.886107] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1a81ef15-77e5-4ea7-b724-f4c40eb6b090 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "0272ca2a-e9ff-4af5-8120-278a82d74627" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.346s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1541.902112] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777641, 'name': CloneVM_Task} progress is 95%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.912036] env[62619]: INFO nova.compute.manager [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Detaching volume 29d5a6b8-d9f2-4372-b79f-d8066f83c85d [ 1541.949760] env[62619]: INFO nova.virt.block_device [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Attempting to driver detach volume 29d5a6b8-d9f2-4372-b79f-d8066f83c85d from mountpoint /dev/sdb [ 1541.950019] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Volume detach. 
Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1541.950211] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369019', 'volume_id': '29d5a6b8-d9f2-4372-b79f-d8066f83c85d', 'name': 'volume-29d5a6b8-d9f2-4372-b79f-d8066f83c85d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cef20063-96f0-46cc-9f7d-4436b60216c6', 'attached_at': '', 'detached_at': '', 'volume_id': '29d5a6b8-d9f2-4372-b79f-d8066f83c85d', 'serial': '29d5a6b8-d9f2-4372-b79f-d8066f83c85d'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1541.951466] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f24c83-6ade-4d81-b60f-6dc83e4af0d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.981805] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1541.982234] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-87a1045d-b5e3-4917-9564-aa40bf3b9ea5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.985739] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a3400e-eb5c-4d29-87c2-48bdef81dfac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.996791] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13aad579-7e5e-4231-ae9c-621154351c65 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.999895] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1541.999895] env[62619]: value = "task-1777644" [ 1541.999895] env[62619]: _type = "Task" [ 1541.999895] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.020135] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46111fec-1ab0-48ac-a321-78106b585216 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.025821] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777644, 'name': CloneVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.041734] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] The volume has not been displaced from its original location: [datastore1] volume-29d5a6b8-d9f2-4372-b79f-d8066f83c85d/volume-29d5a6b8-d9f2-4372-b79f-d8066f83c85d.vmdk. No consolidation needed. {{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1542.046032] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Reconfiguring VM instance instance-00000025 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1542.046146] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff3ab7a8-8638-4c82-957b-38b7bf7ca65e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.070585] env[62619]: DEBUG oslo_vmware.api [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1542.070585] env[62619]: value = "task-1777645" [ 1542.070585] env[62619]: _type = "Task" [ 1542.070585] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.081797] env[62619]: DEBUG oslo_vmware.api [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777645, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.093213] env[62619]: DEBUG nova.compute.manager [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1542.126343] env[62619]: DEBUG nova.virt.hardware [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1542.126701] env[62619]: DEBUG nova.virt.hardware [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1542.126878] env[62619]: DEBUG nova.virt.hardware [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1542.128735] env[62619]: DEBUG nova.virt.hardware [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1542.128930] env[62619]: DEBUG nova.virt.hardware [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1542.129102] env[62619]: DEBUG nova.virt.hardware [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1542.129321] env[62619]: DEBUG nova.virt.hardware [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1542.129569] env[62619]: DEBUG nova.virt.hardware [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1542.129623] env[62619]: DEBUG 
nova.virt.hardware [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1542.129776] env[62619]: DEBUG nova.virt.hardware [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1542.129942] env[62619]: DEBUG nova.virt.hardware [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1542.130849] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb0ad04-5b52-4944-b381-04a90ee2c231 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.140770] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32799b71-3b4d-4482-9aa2-61cd8b04e465 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.203967] env[62619]: DEBUG nova.scheduler.client.report [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1542.217130] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.402948] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777641, 'name': CloneVM_Task, 'duration_secs': 1.787447} completed successfully. 
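The nova.virt.hardware DEBUG lines above trace CPU-topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits set, the maximums default to 65536 and only the 1:1:1 split survives. A minimal sketch of that enumeration in plain Python follows; the helper name and namedtuple are hypothetical and this is not Nova's actual nova/virt/hardware.py code.

    # Illustrative sketch only: a simplified version of the topology enumeration
    # described by the nova.virt.hardware DEBUG lines above.
    from collections import namedtuple
    from itertools import product

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) splits whose product equals vcpus."""
        topologies = []
        for sockets, cores, threads in product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
        return topologies

    # For the 1-vCPU flavor in the log, only one topology is possible:
    print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]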
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.403587] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Created linked-clone VM from snapshot [ 1542.404492] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9c356e-a9ef-45c2-bbe2-45a319df76a6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.413104] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Uploading image 88432144-700d-4829-a1f6-4d35530dfc87 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1542.440068] env[62619]: DEBUG oslo_vmware.rw_handles [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1542.440068] env[62619]: value = "vm-369022" [ 1542.440068] env[62619]: _type = "VirtualMachine" [ 1542.440068] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1542.441223] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ef2b75fe-8158-4f27-a348-795f4b5d3b9f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.452289] env[62619]: DEBUG oslo_vmware.rw_handles [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lease: (returnval){ [ 1542.452289] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529810b5-dfed-002e-cfeb-72afca71aae1" [ 1542.452289] env[62619]: _type = "HttpNfcLease" [ 1542.452289] env[62619]: } obtained for exporting VM: (result){ [ 1542.452289] env[62619]: value = "vm-369022" [ 1542.452289] env[62619]: _type = "VirtualMachine" [ 1542.452289] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1542.452289] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the lease: (returnval){ [ 1542.452289] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529810b5-dfed-002e-cfeb-72afca71aae1" [ 1542.452289] env[62619]: _type = "HttpNfcLease" [ 1542.452289] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1542.459809] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1542.459809] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529810b5-dfed-002e-cfeb-72afca71aae1" [ 1542.459809] env[62619]: _type = "HttpNfcLease" [ 1542.459809] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1542.470992] env[62619]: DEBUG nova.compute.manager [req-bb017c90-3f51-4b2d-b680-1d9b2a2fa795 req-3c1d74ba-0710-4099-a67b-65b7d4130bf5 service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Received event network-changed-f3bed801-3e28-49e7-83d3-60dcdf9a38ea {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1542.471216] env[62619]: DEBUG nova.compute.manager [req-bb017c90-3f51-4b2d-b680-1d9b2a2fa795 req-3c1d74ba-0710-4099-a67b-65b7d4130bf5 service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Refreshing instance network info cache due to event network-changed-f3bed801-3e28-49e7-83d3-60dcdf9a38ea. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1542.471601] env[62619]: DEBUG oslo_concurrency.lockutils [req-bb017c90-3f51-4b2d-b680-1d9b2a2fa795 req-3c1d74ba-0710-4099-a67b-65b7d4130bf5 service nova] Acquiring lock "refresh_cache-0272ca2a-e9ff-4af5-8120-278a82d74627" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1542.471695] env[62619]: DEBUG oslo_concurrency.lockutils [req-bb017c90-3f51-4b2d-b680-1d9b2a2fa795 req-3c1d74ba-0710-4099-a67b-65b7d4130bf5 service nova] Acquired lock "refresh_cache-0272ca2a-e9ff-4af5-8120-278a82d74627" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.471866] env[62619]: DEBUG nova.network.neutron [req-bb017c90-3f51-4b2d-b680-1d9b2a2fa795 req-3c1d74ba-0710-4099-a67b-65b7d4130bf5 service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Refreshing network info cache for port f3bed801-3e28-49e7-83d3-60dcdf9a38ea {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1542.512632] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777644, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.583106] env[62619]: DEBUG oslo_vmware.api [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777645, 'name': ReconfigVM_Task, 'duration_secs': 0.455647} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.583729] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Reconfigured VM instance instance-00000025 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1542.588894] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d5cd10d-8fae-43bf-963a-0dee46950a5c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.607024] env[62619]: DEBUG oslo_vmware.api [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1542.607024] env[62619]: value = "task-1777647" [ 1542.607024] env[62619]: _type = "Task" [ 1542.607024] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.619910] env[62619]: DEBUG oslo_vmware.api [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777647, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.713715] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.645s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.714300] env[62619]: DEBUG nova.compute.manager [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Start building networks asynchronously for instance. 
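The 'Acquiring lock ... / acquired ... waited 0.000s / "released" ... held 2.645s' lines here and throughout the log are emitted by oslo.concurrency's locking helpers. A minimal sketch of that pattern, assuming oslo.concurrency is installed; the function bodies are hypothetical and only the "compute_resources" lock name is taken from the log.

    from oslo_concurrency import lockutils

    # Decorator form: serialize all calls within this process on the named lock.
    @lockutils.synchronized("compute_resources")
    def update_usage():
        # Only one caller at a time runs this body; the decorator's wrapper is
        # what logs the "Acquiring lock" / "acquired ... waited" /
        # '"released" ... held' DEBUG lines with their timings.
        pass

    # Equivalent context-manager form for an ad-hoc critical section.
    def claim_resources():
        with lockutils.lock("compute_resources"):
            pass  # critical section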
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1542.717288] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.805s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.717556] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.719675] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.720s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.719904] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.721664] env[62619]: DEBUG oslo_concurrency.lockutils [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.611s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.721888] env[62619]: DEBUG oslo_concurrency.lockutils [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.723399] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.492s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.725069] env[62619]: INFO nova.compute.claims [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1542.750996] env[62619]: INFO nova.scheduler.client.report [None req-14400ed9-7061-408b-aca7-2f370441dd0c 
tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Deleted allocations for instance 80363e16-5dd2-42ad-9ead-25b121d62211 [ 1542.761298] env[62619]: INFO nova.scheduler.client.report [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Deleted allocations for instance 060427a2-e724-4c51-879e-675154ae5df2 [ 1542.792281] env[62619]: INFO nova.scheduler.client.report [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Deleted allocations for instance 4ee81568-ad9a-4ded-b6fe-15503d85968e [ 1542.951224] env[62619]: DEBUG nova.compute.manager [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Stashing vm_state: active {{(pid=62619) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1542.965163] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1542.965163] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529810b5-dfed-002e-cfeb-72afca71aae1" [ 1542.965163] env[62619]: _type = "HttpNfcLease" [ 1542.965163] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1542.965499] env[62619]: DEBUG oslo_vmware.rw_handles [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1542.965499] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529810b5-dfed-002e-cfeb-72afca71aae1" [ 1542.965499] env[62619]: _type = "HttpNfcLease" [ 1542.965499] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1542.966274] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dbc5d0-6f40-45f5-a4ab-2dfa620ecdb2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.982078] env[62619]: DEBUG oslo_vmware.rw_handles [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa7a12-0d17-59ca-3e82-187cb33023ff/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1542.982358] env[62619]: DEBUG oslo_vmware.rw_handles [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa7a12-0d17-59ca-3e82-187cb33023ff/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1543.065102] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777644, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.079389] env[62619]: DEBUG nova.network.neutron [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Successfully updated port: 95751072-9868-4775-a7ca-205205689a74 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1543.099065] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-22fd8d2f-e2f2-4a87-b240-170afff8c9f2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.117456] env[62619]: DEBUG oslo_vmware.api [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777647, 'name': ReconfigVM_Task, 'duration_secs': 0.215144} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.119370] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369019', 'volume_id': '29d5a6b8-d9f2-4372-b79f-d8066f83c85d', 'name': 'volume-29d5a6b8-d9f2-4372-b79f-d8066f83c85d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cef20063-96f0-46cc-9f7d-4436b60216c6', 'attached_at': '', 'detached_at': '', 'volume_id': '29d5a6b8-d9f2-4372-b79f-d8066f83c85d', 'serial': '29d5a6b8-d9f2-4372-b79f-d8066f83c85d'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1543.230862] env[62619]: DEBUG nova.compute.utils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1543.234265] env[62619]: DEBUG nova.compute.manager [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1543.234704] env[62619]: DEBUG nova.network.neutron [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1543.267448] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "0272ca2a-e9ff-4af5-8120-278a82d74627" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.267711] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "0272ca2a-e9ff-4af5-8120-278a82d74627" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.267938] env[62619]: INFO nova.compute.manager [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Rebooting instance [ 1543.282595] env[62619]: DEBUG oslo_concurrency.lockutils [None req-14400ed9-7061-408b-aca7-2f370441dd0c tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "80363e16-5dd2-42ad-9ead-25b121d62211" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.422s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.283940] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0fbf60c3-8958-4c0d-99c5-071ec540f7b0 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "060427a2-e724-4c51-879e-675154ae5df2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.581s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.302786] env[62619]: DEBUG oslo_concurrency.lockutils [None req-44091ef2-8481-4511-ab11-678f84939f22 tempest-ServersAdminNegativeTestJSON-620776328 tempest-ServersAdminNegativeTestJSON-620776328-project-member] Lock "4ee81568-ad9a-4ded-b6fe-15503d85968e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.667s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.333252] env[62619]: DEBUG nova.policy [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e9094d6b3854c1184307d9bc35a966e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e11e1bca0c747fd8b4a0ca3e220ba4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 
'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1543.409582] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Acquiring lock "ed34ae20-a891-45aa-8124-f36f264937f8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.409820] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Lock "ed34ae20-a891-45aa-8124-f36f264937f8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.446070] env[62619]: DEBUG nova.network.neutron [req-bb017c90-3f51-4b2d-b680-1d9b2a2fa795 req-3c1d74ba-0710-4099-a67b-65b7d4130bf5 service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Updated VIF entry in instance network info cache for port f3bed801-3e28-49e7-83d3-60dcdf9a38ea. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1543.446434] env[62619]: DEBUG nova.network.neutron [req-bb017c90-3f51-4b2d-b680-1d9b2a2fa795 req-3c1d74ba-0710-4099-a67b-65b7d4130bf5 service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Updating instance_info_cache with network_info: [{"id": "f3bed801-3e28-49e7-83d3-60dcdf9a38ea", "address": "fa:16:3e:2b:af:6a", "network": {"id": "d28ddb7a-9db3-465f-8343-8f23b12b5183", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2033557069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "333f40a8350d4a4586cd2236bc63bef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3bed801-3e", "ovs_interfaceid": "f3bed801-3e28-49e7-83d3-60dcdf9a38ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1543.447144] env[62619]: WARNING oslo_messaging._drivers.amqpdriver [req-bb017c90-3f51-4b2d-b680-1d9b2a2fa795 req-3c1d74ba-0710-4099-a67b-65b7d4130bf5 service nova] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. 
Increasing threshold to: 20 [ 1543.479981] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.566475] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777644, 'name': CloneVM_Task} progress is 95%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.570495] env[62619]: INFO nova.compute.manager [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Rebuilding instance [ 1543.584031] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Acquiring lock "refresh_cache-79dfeb2b-06d0-45f1-b97e-10fa4f00d282" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1543.584031] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Acquired lock "refresh_cache-79dfeb2b-06d0-45f1-b97e-10fa4f00d282" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.584031] env[62619]: DEBUG nova.network.neutron [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1543.635896] env[62619]: DEBUG nova.compute.manager [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1543.636904] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81cb22a1-7cc1-44c9-be7b-1eae9a6a74b9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.703902] env[62619]: DEBUG nova.objects.instance [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lazy-loading 'flavor' on Instance uuid cef20063-96f0-46cc-9f7d-4436b60216c6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1543.724539] env[62619]: DEBUG nova.network.neutron [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Successfully created port: 535fa511-22ab-4762-80f3-e92464fddeb5 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1543.734830] 
env[62619]: DEBUG nova.compute.manager [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1543.806080] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "refresh_cache-0272ca2a-e9ff-4af5-8120-278a82d74627" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1543.913774] env[62619]: DEBUG nova.compute.utils [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1543.952797] env[62619]: DEBUG oslo_concurrency.lockutils [req-bb017c90-3f51-4b2d-b680-1d9b2a2fa795 req-3c1d74ba-0710-4099-a67b-65b7d4130bf5 service nova] Releasing lock "refresh_cache-0272ca2a-e9ff-4af5-8120-278a82d74627" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1543.954189] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquired lock "refresh_cache-0272ca2a-e9ff-4af5-8120-278a82d74627" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.954726] env[62619]: DEBUG nova.network.neutron [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1544.062788] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777644, 'name': CloneVM_Task, 'duration_secs': 1.770999} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.063139] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Created linked-clone VM from snapshot [ 1544.064426] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb71a28-a3fc-41d3-9d65-7ba318497a55 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.080087] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Uploading image cce1f626-0c24-4d2e-b753-64401418a36e {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1544.119402] env[62619]: DEBUG oslo_vmware.rw_handles [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1544.119402] env[62619]: value = "vm-369024" [ 1544.119402] env[62619]: _type = "VirtualMachine" [ 1544.119402] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1544.119709] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-422de734-e2f0-406f-aecc-2eaf185a2fff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.129739] env[62619]: DEBUG oslo_vmware.rw_handles [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lease: (returnval){ [ 1544.129739] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52068726-9d18-5f7e-b5f6-0f7bb97c29f0" [ 1544.129739] env[62619]: _type = "HttpNfcLease" [ 1544.129739] env[62619]: } obtained for exporting VM: (result){ [ 1544.129739] env[62619]: value = "vm-369024" [ 1544.129739] env[62619]: _type = "VirtualMachine" [ 1544.129739] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1544.131028] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the lease: (returnval){ [ 1544.131028] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52068726-9d18-5f7e-b5f6-0f7bb97c29f0" [ 1544.131028] env[62619]: _type = "HttpNfcLease" [ 1544.131028] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1544.141016] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1544.141016] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52068726-9d18-5f7e-b5f6-0f7bb97c29f0" [ 1544.141016] env[62619]: _type = "HttpNfcLease" [ 1544.141016] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1544.159508] env[62619]: DEBUG nova.network.neutron [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1544.218119] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "6dbe4133-a6ba-4bba-9eb9-47a3d2691eec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.218253] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "6dbe4133-a6ba-4bba-9eb9-47a3d2691eec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.218253] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "6dbe4133-a6ba-4bba-9eb9-47a3d2691eec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.218571] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "6dbe4133-a6ba-4bba-9eb9-47a3d2691eec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.219112] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "6dbe4133-a6ba-4bba-9eb9-47a3d2691eec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.224279] env[62619]: INFO nova.compute.manager [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Terminating instance [ 1544.354224] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eff33c7-4514-45ea-a306-a9d7c9f46d4c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.365096] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aceff05e-808c-4ad0-907a-e9123b5b35b1 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.408144] env[62619]: DEBUG nova.network.neutron [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Updating instance_info_cache with network_info: [{"id": "95751072-9868-4775-a7ca-205205689a74", "address": "fa:16:3e:6e:28:4d", "network": {"id": "41b6e9fe-c121-4e79-9a3c-712ea7dab59a", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-18026459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3015193883ab4a67a9c084424c40806c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95751072-98", "ovs_interfaceid": "95751072-9868-4775-a7ca-205205689a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1544.410334] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b226e10-2823-479b-9d0b-9d7a8649fe81 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.419411] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Lock "ed34ae20-a891-45aa-8124-f36f264937f8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.427309] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b91ada-2b0f-4f5c-86a3-0783841a35a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.443755] env[62619]: DEBUG nova.compute.provider_tree [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1544.639877] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1544.639877] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52068726-9d18-5f7e-b5f6-0f7bb97c29f0" [ 1544.639877] env[62619]: _type = "HttpNfcLease" [ 1544.639877] env[62619]: } is ready. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1544.640207] env[62619]: DEBUG oslo_vmware.rw_handles [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1544.640207] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52068726-9d18-5f7e-b5f6-0f7bb97c29f0" [ 1544.640207] env[62619]: _type = "HttpNfcLease" [ 1544.640207] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1544.641030] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc67f2ec-ac27-4df4-8de4-e9635a9e06fc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.649808] env[62619]: DEBUG oslo_vmware.rw_handles [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521d0e36-df5a-1109-9df4-bd2ac6551d4f/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1544.649936] env[62619]: DEBUG oslo_vmware.rw_handles [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521d0e36-df5a-1109-9df4-bd2ac6551d4f/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1544.708273] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1544.711032] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24f5b9a9-5ee6-4bed-a01b-c898cb2be774 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.715554] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c3141a0-8f68-40a6-8a3b-6f4bdd1281a0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "cef20063-96f0-46cc-9f7d-4436b60216c6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.306s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.719640] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1544.719640] env[62619]: value = "task-1777649" [ 1544.719640] env[62619]: _type = "Task" [ 1544.719640] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.732597] env[62619]: DEBUG nova.compute.manager [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1544.732860] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1544.733226] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777649, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.734195] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a8c4e3-aff1-414c-8064-e9e05933ca73 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.741632] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1544.742252] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d330d184-2554-4fb9-9c41-4f6b1616c885 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.749114] env[62619]: DEBUG oslo_vmware.api [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1544.749114] env[62619]: value = "task-1777650" [ 1544.749114] env[62619]: _type = "Task" [ 1544.749114] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.750160] env[62619]: DEBUG nova.compute.manager [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1544.763531] env[62619]: DEBUG oslo_vmware.api [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777650, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.766244] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1becac15-6596-44c2-989b-3e2ce741ed82 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.790756] env[62619]: DEBUG nova.virt.hardware [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1544.791015] env[62619]: DEBUG nova.virt.hardware [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1544.791184] env[62619]: DEBUG nova.virt.hardware [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1544.791362] env[62619]: DEBUG nova.virt.hardware [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1544.791542] env[62619]: DEBUG nova.virt.hardware [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1544.791707] env[62619]: DEBUG nova.virt.hardware [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1544.791850] env[62619]: DEBUG nova.virt.hardware [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1544.792015] env[62619]: DEBUG nova.virt.hardware [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1544.792179] env[62619]: DEBUG nova.virt.hardware [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1544.792330] env[62619]: DEBUG nova.virt.hardware [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1544.792496] env[62619]: DEBUG nova.virt.hardware [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1544.794659] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e798467e-859f-4652-911b-9d686acaa75c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.805596] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ba5f3c-9e97-45e3-a5a2-aded09087f0b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.897350] env[62619]: DEBUG nova.network.neutron [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Updating instance_info_cache with network_info: [{"id": "f3bed801-3e28-49e7-83d3-60dcdf9a38ea", "address": "fa:16:3e:2b:af:6a", "network": {"id": "d28ddb7a-9db3-465f-8343-8f23b12b5183", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2033557069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "333f40a8350d4a4586cd2236bc63bef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3bed801-3e", "ovs_interfaceid": "f3bed801-3e28-49e7-83d3-60dcdf9a38ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1544.913801] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Releasing lock "refresh_cache-79dfeb2b-06d0-45f1-b97e-10fa4f00d282" {{(pid=62619) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1544.917570] env[62619]: DEBUG nova.compute.manager [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Instance network_info: |[{"id": "95751072-9868-4775-a7ca-205205689a74", "address": "fa:16:3e:6e:28:4d", "network": {"id": "41b6e9fe-c121-4e79-9a3c-712ea7dab59a", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-18026459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3015193883ab4a67a9c084424c40806c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95751072-98", "ovs_interfaceid": "95751072-9868-4775-a7ca-205205689a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1544.917570] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:28:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac2c9d07-ed01-47a9-88f1-562992bc1076', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '95751072-9868-4775-a7ca-205205689a74', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1544.923217] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Creating folder: Project (3015193883ab4a67a9c084424c40806c). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1544.924298] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c655396e-956a-41b1-aa45-7658a6d2d240 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.935333] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Created folder: Project (3015193883ab4a67a9c084424c40806c) in parent group-v368875. [ 1544.935556] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Creating folder: Instances. Parent ref: group-v369025. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1544.937824] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ffec6ff-6097-47a8-bfc0-b0af1006ae37 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.950182] env[62619]: DEBUG nova.scheduler.client.report [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1544.954640] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Created folder: Instances in parent group-v369025. [ 1544.954976] env[62619]: DEBUG oslo.service.loopingcall [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1544.957024] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1544.957024] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0be46e1f-08e8-4039-8078-af6a36a8187a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.978343] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1544.978343] env[62619]: value = "task-1777653" [ 1544.978343] env[62619]: _type = "Task" [ 1544.978343] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.988352] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777653, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.183783] env[62619]: DEBUG nova.compute.manager [req-316a03fb-df9a-45d7-a910-a7919b95a2b4 req-03428b20-4032-4aea-bcc3-b3b51fb982fe service nova] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Received event network-vif-plugged-95751072-9868-4775-a7ca-205205689a74 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1545.185019] env[62619]: DEBUG oslo_concurrency.lockutils [req-316a03fb-df9a-45d7-a910-a7919b95a2b4 req-03428b20-4032-4aea-bcc3-b3b51fb982fe service nova] Acquiring lock "79dfeb2b-06d0-45f1-b97e-10fa4f00d282-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.185019] env[62619]: DEBUG oslo_concurrency.lockutils [req-316a03fb-df9a-45d7-a910-a7919b95a2b4 req-03428b20-4032-4aea-bcc3-b3b51fb982fe service nova] Lock "79dfeb2b-06d0-45f1-b97e-10fa4f00d282-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.185019] env[62619]: DEBUG oslo_concurrency.lockutils [req-316a03fb-df9a-45d7-a910-a7919b95a2b4 req-03428b20-4032-4aea-bcc3-b3b51fb982fe service nova] Lock "79dfeb2b-06d0-45f1-b97e-10fa4f00d282-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.185019] env[62619]: DEBUG nova.compute.manager [req-316a03fb-df9a-45d7-a910-a7919b95a2b4 req-03428b20-4032-4aea-bcc3-b3b51fb982fe service nova] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] No waiting events found dispatching network-vif-plugged-95751072-9868-4775-a7ca-205205689a74 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1545.185019] env[62619]: WARNING nova.compute.manager [req-316a03fb-df9a-45d7-a910-a7919b95a2b4 req-03428b20-4032-4aea-bcc3-b3b51fb982fe service nova] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Received unexpected event network-vif-plugged-95751072-9868-4775-a7ca-205205689a74 for instance with vm_state building and task_state spawning. [ 1545.185019] env[62619]: DEBUG nova.compute.manager [req-316a03fb-df9a-45d7-a910-a7919b95a2b4 req-03428b20-4032-4aea-bcc3-b3b51fb982fe service nova] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Received event network-changed-95751072-9868-4775-a7ca-205205689a74 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1545.185545] env[62619]: DEBUG nova.compute.manager [req-316a03fb-df9a-45d7-a910-a7919b95a2b4 req-03428b20-4032-4aea-bcc3-b3b51fb982fe service nova] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Refreshing instance network info cache due to event network-changed-95751072-9868-4775-a7ca-205205689a74. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1545.185911] env[62619]: DEBUG oslo_concurrency.lockutils [req-316a03fb-df9a-45d7-a910-a7919b95a2b4 req-03428b20-4032-4aea-bcc3-b3b51fb982fe service nova] Acquiring lock "refresh_cache-79dfeb2b-06d0-45f1-b97e-10fa4f00d282" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.187869] env[62619]: DEBUG oslo_concurrency.lockutils [req-316a03fb-df9a-45d7-a910-a7919b95a2b4 req-03428b20-4032-4aea-bcc3-b3b51fb982fe service nova] Acquired lock "refresh_cache-79dfeb2b-06d0-45f1-b97e-10fa4f00d282" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.187869] env[62619]: DEBUG nova.network.neutron [req-316a03fb-df9a-45d7-a910-a7919b95a2b4 req-03428b20-4032-4aea-bcc3-b3b51fb982fe service nova] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Refreshing network info cache for port 95751072-9868-4775-a7ca-205205689a74 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1545.230781] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777649, 'name': PowerOffVM_Task, 'duration_secs': 0.253148} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.231663] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1545.232129] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1545.233133] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4b2f24-edd4-4957-9dec-78b0c12fd91a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.243792] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1545.244464] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c9bb3c8-02bc-4df6-991f-bd9bd5d67e4c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.265157] env[62619]: DEBUG oslo_vmware.api [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777650, 'name': PowerOffVM_Task, 'duration_secs': 0.223962} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.265678] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1545.265955] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1545.266299] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bda563cd-6ffc-48ad-8e35-55ee11f0c035 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.395562] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1545.395562] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1545.395562] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Deleting the datastore file [datastore1] 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1545.396053] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebf11ff6-0d71-4f5e-b1ab-7675099b4a2a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.403166] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Releasing lock "refresh_cache-0272ca2a-e9ff-4af5-8120-278a82d74627" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1545.405118] env[62619]: DEBUG oslo_vmware.api [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for the task: (returnval){ [ 1545.405118] env[62619]: value = "task-1777656" [ 1545.405118] env[62619]: _type = "Task" [ 1545.405118] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.411821] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1545.413050] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1545.413050] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleting the datastore file [datastore1] ac03bcf3-61df-4557-8018-0ad54ef30f17 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1545.414256] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5bb687f0-ea02-4745-9c89-8385e29ba435 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.421240] env[62619]: DEBUG oslo_vmware.api [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777656, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.428120] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1545.428120] env[62619]: value = "task-1777657" [ 1545.428120] env[62619]: _type = "Task" [ 1545.428120] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.441267] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777657, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.458402] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.734s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.459069] env[62619]: DEBUG nova.compute.manager [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1545.463009] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.572s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.463473] env[62619]: DEBUG nova.objects.instance [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1545.492092] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777653, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.559033] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Acquiring lock "ed34ae20-a891-45aa-8124-f36f264937f8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.559033] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Lock "ed34ae20-a891-45aa-8124-f36f264937f8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.559033] env[62619]: INFO nova.compute.manager [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Attaching volume afcc9063-7d91-4cc2-8215-be590afb876a to /dev/sdb [ 1545.596169] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2325f9dd-00d2-4a29-9333-a6a802d36234 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.603809] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b18c57-7fc2-4c3f-aff2-ae9f4dcad946 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.624386] env[62619]: DEBUG nova.virt.block_device [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Updating existing volume attachment record: d895ffe4-9ddc-43b1-bf01-3963d743855c {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1545.652818] env[62619]: DEBUG nova.network.neutron [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 
0a80942c-eb86-480b-ab7b-33112dd90d28] Successfully updated port: 535fa511-22ab-4762-80f3-e92464fddeb5 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1545.922386] env[62619]: DEBUG oslo_vmware.api [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777656, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.931825] env[62619]: DEBUG nova.compute.manager [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1545.931825] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f755a7fe-ff9b-4a68-a315-214dc9153101 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.950574] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777657, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.965119] env[62619]: DEBUG nova.compute.utils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1545.967086] env[62619]: DEBUG nova.compute.manager [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1545.969810] env[62619]: DEBUG nova.network.neutron [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1545.994328] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777653, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.156191] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "refresh_cache-0a80942c-eb86-480b-ab7b-33112dd90d28" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.156191] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "refresh_cache-0a80942c-eb86-480b-ab7b-33112dd90d28" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.156191] env[62619]: DEBUG nova.network.neutron [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1546.168668] env[62619]: DEBUG nova.network.neutron [req-316a03fb-df9a-45d7-a910-a7919b95a2b4 req-03428b20-4032-4aea-bcc3-b3b51fb982fe service nova] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Updated VIF entry in instance network info cache for port 95751072-9868-4775-a7ca-205205689a74. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1546.169215] env[62619]: DEBUG nova.network.neutron [req-316a03fb-df9a-45d7-a910-a7919b95a2b4 req-03428b20-4032-4aea-bcc3-b3b51fb982fe service nova] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Updating instance_info_cache with network_info: [{"id": "95751072-9868-4775-a7ca-205205689a74", "address": "fa:16:3e:6e:28:4d", "network": {"id": "41b6e9fe-c121-4e79-9a3c-712ea7dab59a", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-18026459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3015193883ab4a67a9c084424c40806c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95751072-98", "ovs_interfaceid": "95751072-9868-4775-a7ca-205205689a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.201718] env[62619]: DEBUG nova.policy [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '337abea0d4414c9895156256801a3629', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8311e2dca4814727b91967833796fc66', 'project_domain_id': 
'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1546.418871] env[62619]: DEBUG oslo_vmware.api [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777656, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.446928] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777657, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.471244] env[62619]: DEBUG nova.network.neutron [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Successfully created port: 7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1546.473820] env[62619]: DEBUG nova.compute.manager [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1546.477874] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ca820fd-127e-4949-8df4-ed1fa0008835 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.479725] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.729s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1546.480075] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.483865] env[62619]: DEBUG oslo_concurrency.lockutils [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.564s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1546.483865] env[62619]: DEBUG oslo_concurrency.lockutils [None 
req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.484952] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.763s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1546.486217] env[62619]: INFO nova.compute.claims [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1546.500996] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777653, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.518906] env[62619]: INFO nova.scheduler.client.report [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Deleted allocations for instance d4230edc-cfda-4b9f-ab42-2f39c699ff03 [ 1546.522695] env[62619]: INFO nova.scheduler.client.report [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleted allocations for instance 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c [ 1546.672906] env[62619]: DEBUG oslo_concurrency.lockutils [req-316a03fb-df9a-45d7-a910-a7919b95a2b4 req-03428b20-4032-4aea-bcc3-b3b51fb982fe service nova] Releasing lock "refresh_cache-79dfeb2b-06d0-45f1-b97e-10fa4f00d282" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.704966] env[62619]: DEBUG nova.network.neutron [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1546.857705] env[62619]: DEBUG nova.network.neutron [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Successfully created port: 4603d400-5a47-4629-9915-a5315eeca473 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1546.917780] env[62619]: DEBUG oslo_vmware.api [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777656, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.948859] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777657, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.955020] env[62619]: DEBUG nova.network.neutron [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Updating instance_info_cache with network_info: [{"id": "535fa511-22ab-4762-80f3-e92464fddeb5", "address": "fa:16:3e:41:e6:05", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap535fa511-22", "ovs_interfaceid": "535fa511-22ab-4762-80f3-e92464fddeb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.959516] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b655062-d31a-425c-88df-bdf45004ffbf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.967175] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Doing hard reboot of VM {{(pid=62619) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1546.967447] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-73555242-f163-4900-ab1e-ac361cfdca4d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.973965] env[62619]: DEBUG oslo_vmware.api [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1546.973965] env[62619]: value = "task-1777661" [ 1546.973965] env[62619]: _type = "Task" [ 1546.973965] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.992025] env[62619]: DEBUG oslo_vmware.api [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777661, 'name': ResetVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.002981] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777653, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.041583] env[62619]: DEBUG oslo_concurrency.lockutils [None req-63084014-4e2c-4a62-ad05-b72ef157d0e0 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.568s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.046023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa7c70dc-f0c5-4472-8957-f6f383b9a8c2 tempest-ServersTestJSON-1797885290 tempest-ServersTestJSON-1797885290-project-member] Lock "d4230edc-cfda-4b9f-ab42-2f39c699ff03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.113s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.416861] env[62619]: DEBUG oslo_vmware.api [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777656, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.447256] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777657, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.457735] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "refresh_cache-0a80942c-eb86-480b-ab7b-33112dd90d28" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.457735] env[62619]: DEBUG nova.compute.manager [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Instance network_info: |[{"id": "535fa511-22ab-4762-80f3-e92464fddeb5", "address": "fa:16:3e:41:e6:05", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap535fa511-22", "ovs_interfaceid": "535fa511-22ab-4762-80f3-e92464fddeb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1547.458717] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:e6:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '950a2f67-7668-4376-9d48-b38dca033c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '535fa511-22ab-4762-80f3-e92464fddeb5', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1547.468677] env[62619]: DEBUG oslo.service.loopingcall [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1547.469059] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1547.469359] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-580b21b3-0188-440d-8b9e-748f41774a05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.494439] env[62619]: DEBUG nova.compute.manager [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1547.496415] env[62619]: DEBUG oslo_vmware.api [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777661, 'name': ResetVM_Task, 'duration_secs': 0.159232} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.500582] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Did hard reboot of VM {{(pid=62619) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1547.500864] env[62619]: DEBUG nova.compute.manager [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1547.501145] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1547.501145] env[62619]: value = "task-1777662" [ 1547.501145] env[62619]: _type = "Task" [ 1547.501145] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.505485] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af9629e-e71e-4bd2-9a3e-0fa35aaf1f29 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.516885] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777653, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.521310] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777662, 'name': CreateVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.922531] env[62619]: DEBUG oslo_vmware.api [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Task: {'id': task-1777656, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.49164} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.922997] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1547.923320] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1547.923633] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1547.923991] env[62619]: INFO nova.compute.manager [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Took 3.19 seconds to destroy the instance on the hypervisor. [ 1547.926017] env[62619]: DEBUG oslo.service.loopingcall [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1547.926017] env[62619]: DEBUG nova.compute.manager [-] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1547.926017] env[62619]: DEBUG nova.network.neutron [-] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1547.949165] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777657, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.464986} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.952238] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1547.952490] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1547.952642] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1548.000422] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971fc7ba-df8f-4fa2-85e3-5c1b7a58cf67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.018550] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777653, 'name': CreateVM_Task, 'duration_secs': 2.962931} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.019754] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1548.020590] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe8e7d2-8608-4dee-8286-123bcaccd20d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.029244] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.029427] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.029732] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1548.032613] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777662, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.033183] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7e31dd2e-146d-428c-bfc7-5089e724924b tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "0272ca2a-e9ff-4af5-8120-278a82d74627" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.765s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.034275] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de3860b4-2c39-4607-baf1-8665b562abce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.069402] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611e04ff-c4f9-4546-9fc6-5b882f8c535d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.072768] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Waiting for the task: (returnval){ [ 1548.072768] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52868f10-86d4-990b-e9f5-2a60c88e5327" [ 1548.072768] env[62619]: _type = "Task" [ 1548.072768] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.081032] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffcb379a-786e-4afd-9e79-d5800321d95f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.089277] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52868f10-86d4-990b-e9f5-2a60c88e5327, 'name': SearchDatastore_Task, 'duration_secs': 0.017131} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.089950] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1548.090224] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1548.090453] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.090593] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.090774] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1548.091043] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d8b07a9-26c2-403d-9f07-c116233f9ecc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.102105] env[62619]: DEBUG nova.compute.provider_tree [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1548.113406] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1548.113668] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1548.114552] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d221dfa6-50dc-43a9-b417-b7d52a89c968 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.120767] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Waiting for the task: (returnval){ [ 1548.120767] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52234d4a-2bfc-178b-56f0-f455bf062556" [ 1548.120767] env[62619]: _type = "Task" [ 1548.120767] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.130573] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52234d4a-2bfc-178b-56f0-f455bf062556, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.461226] env[62619]: DEBUG nova.network.neutron [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Successfully updated port: 7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1548.521493] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777662, 'name': CreateVM_Task, 'duration_secs': 0.586272} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.521683] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1548.522394] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.522549] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.522871] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1548.523139] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-533fc094-b193-4c55-b43b-528c75556965 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.528193] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1548.528193] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522b5d85-92db-9257-0abb-ad1da63b3929" [ 1548.528193] env[62619]: _type = "Task" [ 1548.528193] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.536532] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522b5d85-92db-9257-0abb-ad1da63b3929, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.606096] env[62619]: DEBUG nova.scheduler.client.report [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1548.636783] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52234d4a-2bfc-178b-56f0-f455bf062556, 'name': SearchDatastore_Task, 'duration_secs': 0.021786} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.638067] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98f661a1-2013-45fa-a0cb-225e6e10301d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.645824] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Waiting for the task: (returnval){ [ 1548.645824] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fb4218-1217-d4b1-f004-f8a4e360c4ba" [ 1548.645824] env[62619]: _type = "Task" [ 1548.645824] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.654624] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fb4218-1217-d4b1-f004-f8a4e360c4ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.679514] env[62619]: DEBUG nova.network.neutron [-] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.041584] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522b5d85-92db-9257-0abb-ad1da63b3929, 'name': SearchDatastore_Task, 'duration_secs': 0.022105} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.041882] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.042084] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1549.042414] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.111986] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.627s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.112262] env[62619]: DEBUG nova.compute.manager [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1549.115092] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.110s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.115278] env[62619]: DEBUG nova.objects.instance [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: a802534f-1766-4ea9-9188-803ef197d775] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1549.161009] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fb4218-1217-d4b1-f004-f8a4e360c4ba, 'name': SearchDatastore_Task, 'duration_secs': 0.016204} completed successfully. 
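The lock lines above ("acquired ... :: waited 26.110s" / "released ... :: held 2.627s") come from oslo.concurrency's lockutils wrapper, the inner function cited at lockutils.py:407 and lockutils.py:421. A minimal sketch of the decorator that produces them follows; the lock name is copied from the log, but the function and its body are placeholders, not Nova code.

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def claim_resources():
    # Critical section: on entry lockutils logs how long the caller waited
    # for the lock, and on exit how long it was held (both at DEBUG level).
    pass

if __name__ == "__main__":
    claim_resources()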
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.161009] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.161009] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 79dfeb2b-06d0-45f1-b97e-10fa4f00d282/79dfeb2b-06d0-45f1-b97e-10fa4f00d282.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1549.161888] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.162102] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1549.162345] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11f98078-f21a-4e90-8b8d-db67b053ac2a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.165294] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13f71131-1c18-46bf-86ff-889dedf60414 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.175840] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Waiting for the task: (returnval){ [ 1549.175840] env[62619]: value = "task-1777664" [ 1549.175840] env[62619]: _type = "Task" [ 1549.175840] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.177573] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1549.178035] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1549.182060] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-597ed7ff-67de-46fd-aa70-bf3f88e2aa6b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.185982] env[62619]: INFO nova.compute.manager [-] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Took 1.26 seconds to deallocate network for instance. [ 1549.198919] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777664, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.199578] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1549.199578] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e23e52-a8a7-1e3f-f7bd-294fa8805bce" [ 1549.199578] env[62619]: _type = "Task" [ 1549.199578] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.209674] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e23e52-a8a7-1e3f-f7bd-294fa8805bce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.623978] env[62619]: DEBUG nova.compute.utils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1549.629053] env[62619]: DEBUG nova.compute.manager [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1549.630392] env[62619]: DEBUG nova.network.neutron [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1549.688246] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777664, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.690274] env[62619]: DEBUG nova.policy [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '969682baddb94ea9bc1cb515f69344fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d72c95915b94026bd402b05f067fecb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1549.694443] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.710897] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e23e52-a8a7-1e3f-f7bd-294fa8805bce, 'name': SearchDatastore_Task, 'duration_secs': 0.016153} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.711852] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b4225ad-2d6f-42c1-b7fd-ae5e8b71cdd9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.718231] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1549.718231] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527d3a7b-d96e-83ee-7ab3-fdeb8e8605d8" [ 1549.718231] env[62619]: _type = "Task" [ 1549.718231] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.727500] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527d3a7b-d96e-83ee-7ab3-fdeb8e8605d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.037011] env[62619]: DEBUG nova.network.neutron [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Successfully created port: 35590c26-21c7-47f0-9a37-848ba413367f {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1550.129787] env[62619]: DEBUG nova.compute.manager [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1550.139390] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c6eb0d3e-5bde-4ab0-9d27-e1838bd35851 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.024s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.140456] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.406s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.142886] env[62619]: INFO nova.compute.claims [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1550.190676] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777664, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.643144} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.190927] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 79dfeb2b-06d0-45f1-b97e-10fa4f00d282/79dfeb2b-06d0-45f1-b97e-10fa4f00d282.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1550.193109] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1550.194435] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Volume attach. Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1550.194663] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369030', 'volume_id': 'afcc9063-7d91-4cc2-8215-be590afb876a', 'name': 'volume-afcc9063-7d91-4cc2-8215-be590afb876a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ed34ae20-a891-45aa-8124-f36f264937f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'afcc9063-7d91-4cc2-8215-be590afb876a', 'serial': 'afcc9063-7d91-4cc2-8215-be590afb876a'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1550.194983] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee470da2-fd9e-4723-8cf5-18d9431fd0dd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.197675] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6213a7fc-b094-46a5-b344-b4036c117c01 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.220847] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64066bdc-11fa-4858-9cbd-567bb371aa97 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.224411] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Waiting for the task: (returnval){ [ 1550.224411] env[62619]: value = "task-1777665" [ 1550.224411] env[62619]: _type = "Task" [ 1550.224411] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.252738] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] volume-afcc9063-7d91-4cc2-8215-be590afb876a/volume-afcc9063-7d91-4cc2-8215-be590afb876a.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1550.257328] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f1d3ed8-4058-4aee-b292-36d0765a25d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.270973] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527d3a7b-d96e-83ee-7ab3-fdeb8e8605d8, 'name': SearchDatastore_Task, 'duration_secs': 0.056566} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.274273] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.274561] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 0a80942c-eb86-480b-ab7b-33112dd90d28/0a80942c-eb86-480b-ab7b-33112dd90d28.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1550.275225] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777665, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.275832] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba0df452-9d54-4464-af47-fa2bf8a45d7a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.281070] env[62619]: DEBUG oslo_vmware.api [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Waiting for the task: (returnval){ [ 1550.281070] env[62619]: value = "task-1777666" [ 1550.281070] env[62619]: _type = "Task" [ 1550.281070] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.282483] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1550.282483] env[62619]: value = "task-1777667" [ 1550.282483] env[62619]: _type = "Task" [ 1550.282483] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.295590] env[62619]: DEBUG oslo_vmware.api [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Task: {'id': task-1777666, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.299244] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777667, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.537656] env[62619]: DEBUG nova.network.neutron [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Successfully updated port: 4603d400-5a47-4629-9915-a5315eeca473 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1550.741194] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777665, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.111194} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.741625] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1550.742458] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a72f46d-7e68-487c-b2ea-b96af5bf2777 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.768310] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 79dfeb2b-06d0-45f1-b97e-10fa4f00d282/79dfeb2b-06d0-45f1-b97e-10fa4f00d282.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1550.768664] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fabf7edb-291a-475e-98cc-234da8a6a225 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.795329] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777667, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.799295] env[62619]: DEBUG oslo_vmware.api [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Task: {'id': task-1777666, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.799613] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Waiting for the task: (returnval){ [ 1550.799613] env[62619]: value = "task-1777668" [ 1550.799613] env[62619]: _type = "Task" [ 1550.799613] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.812060] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777668, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.043035] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "refresh_cache-1f86b805-0fde-4bda-9a94-d440a670e23c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1551.043451] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquired lock "refresh_cache-1f86b805-0fde-4bda-9a94-d440a670e23c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1551.043451] env[62619]: DEBUG nova.network.neutron [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1551.144868] env[62619]: DEBUG nova.compute.manager [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1551.302086] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777667, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.654994} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.302464] env[62619]: DEBUG oslo_vmware.api [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Task: {'id': task-1777666, 'name': ReconfigVM_Task, 'duration_secs': 0.7774} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.302691] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 0a80942c-eb86-480b-ab7b-33112dd90d28/0a80942c-eb86-480b-ab7b-33112dd90d28.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1551.302903] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1551.306811] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Reconfigured VM instance instance-0000000b to attach disk [datastore1] volume-afcc9063-7d91-4cc2-8215-be590afb876a/volume-afcc9063-7d91-4cc2-8215-be590afb876a.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1551.314306] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3cfab0b7-9d13-4019-ba78-8584e95b66d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.316317] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66e53eaa-3edf-4cd8-b11d-34717f7f49ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.337422] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1551.337422] env[62619]: value = "task-1777669" [ 1551.337422] env[62619]: _type = "Task" [ 1551.337422] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.337831] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777668, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.342115] env[62619]: DEBUG oslo_vmware.api [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Waiting for the task: (returnval){ [ 1551.342115] env[62619]: value = "task-1777670" [ 1551.342115] env[62619]: _type = "Task" [ 1551.342115] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.363193] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777669, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.363467] env[62619]: DEBUG oslo_vmware.api [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Task: {'id': task-1777670, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.585244] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c98c494-0779-412e-afe7-c10bddb36cf3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.594361] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e84329-2fc5-4696-9ba2-b57fbc808767 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.598307] env[62619]: DEBUG nova.network.neutron [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1551.635746] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9616e6ee-2521-4202-9977-315354662a93 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.644032] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-219d1387-c185-4d4a-99ba-2e3022891390 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.657794] env[62619]: DEBUG nova.compute.provider_tree [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1551.688820] env[62619]: DEBUG nova.network.neutron [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Successfully updated port: 35590c26-21c7-47f0-9a37-848ba413367f {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1551.812548] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777668, 'name': ReconfigVM_Task, 'duration_secs': 0.590986} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.812928] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 79dfeb2b-06d0-45f1-b97e-10fa4f00d282/79dfeb2b-06d0-45f1-b97e-10fa4f00d282.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1551.813457] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc24e508-089b-422d-bc20-69e645de62a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.822458] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Waiting for the task: (returnval){ [ 1551.822458] env[62619]: value = "task-1777671" [ 1551.822458] env[62619]: _type = "Task" [ 1551.822458] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.832020] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777671, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.857211] env[62619]: DEBUG oslo_vmware.api [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Task: {'id': task-1777670, 'name': ReconfigVM_Task, 'duration_secs': 0.225458} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.860427] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369030', 'volume_id': 'afcc9063-7d91-4cc2-8215-be590afb876a', 'name': 'volume-afcc9063-7d91-4cc2-8215-be590afb876a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ed34ae20-a891-45aa-8124-f36f264937f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'afcc9063-7d91-4cc2-8215-be590afb876a', 'serial': 'afcc9063-7d91-4cc2-8215-be590afb876a'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1551.861925] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777669, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127002} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.862387] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1551.863184] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8c96e3-c6e5-4ea8-8a6a-3d69b9288d76 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.885891] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 0a80942c-eb86-480b-ab7b-33112dd90d28/0a80942c-eb86-480b-ab7b-33112dd90d28.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1551.889044] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db0f1774-ecbd-454d-950d-9335e1c24299 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.910958] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1551.910958] env[62619]: value = "task-1777672" [ 1551.910958] env[62619]: _type = "Task" [ 1551.910958] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.920356] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777672, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.945541] env[62619]: DEBUG nova.network.neutron [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Updating instance_info_cache with network_info: [{"id": "7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575", "address": "fa:16:3e:fd:35:d7", "network": {"id": "65fab282-c9d0-4538-b853-bc118e26fddd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2136510104", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e9d4c42-dc", "ovs_interfaceid": "7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4603d400-5a47-4629-9915-a5315eeca473", "address": "fa:16:3e:f8:73:07", "network": {"id": "bc8f0b8b-aaf4-4e38-ad5a-c1e19b3f4a04", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-796388189", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.44", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4603d400-5a", "ovs_interfaceid": "4603d400-5a47-4629-9915-a5315eeca473", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.164619] env[62619]: DEBUG nova.scheduler.client.report [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1552.191052] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Acquiring lock "refresh_cache-fab3d689-9e30-4afd-b0cc-49c6d2870c50" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.191245] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Acquired lock "refresh_cache-fab3d689-9e30-4afd-b0cc-49c6d2870c50" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.191340] env[62619]: DEBUG nova.network.neutron [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1552.333344] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777671, 'name': Rename_Task, 'duration_secs': 0.233565} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.333690] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1552.334581] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c68c7fb-be6a-40b7-80c1-cc652a76a15b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.340354] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Waiting for the task: (returnval){ [ 1552.340354] env[62619]: value = "task-1777673" [ 1552.340354] env[62619]: _type = "Task" [ 1552.340354] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.347896] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777673, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.423085] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777672, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.438181] env[62619]: DEBUG nova.virt.hardware [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1552.438488] env[62619]: DEBUG nova.virt.hardware [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1552.439123] env[62619]: DEBUG nova.virt.hardware [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1552.439123] env[62619]: DEBUG nova.virt.hardware [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1552.439123] env[62619]: DEBUG nova.virt.hardware [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1552.439545] env[62619]: DEBUG nova.virt.hardware [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1552.439545] env[62619]: DEBUG nova.virt.hardware [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1552.439935] env[62619]: DEBUG nova.virt.hardware [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1552.439935] env[62619]: DEBUG nova.virt.hardware [None 
req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1552.440100] env[62619]: DEBUG nova.virt.hardware [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1552.440288] env[62619]: DEBUG nova.virt.hardware [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1552.445022] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b009006-d167-4d5e-834d-f52f1cc9744c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.450369] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Releasing lock "refresh_cache-1f86b805-0fde-4bda-9a94-d440a670e23c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.450746] env[62619]: DEBUG nova.compute.manager [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Instance network_info: |[{"id": "7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575", "address": "fa:16:3e:fd:35:d7", "network": {"id": "65fab282-c9d0-4538-b853-bc118e26fddd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2136510104", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e9d4c42-dc", "ovs_interfaceid": "7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4603d400-5a47-4629-9915-a5315eeca473", "address": "fa:16:3e:f8:73:07", "network": {"id": "bc8f0b8b-aaf4-4e38-ad5a-c1e19b3f4a04", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-796388189", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.44", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4603d400-5a", "ovs_interfaceid": "4603d400-5a47-4629-9915-a5315eeca473", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1552.452340] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce4d58a-26f2-42f1-84d6-cb64d8e56f82 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.458739] env[62619]: DEBUG nova.virt.hardware [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1552.458992] env[62619]: DEBUG nova.virt.hardware [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1552.459127] env[62619]: DEBUG nova.virt.hardware [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1552.459305] env[62619]: DEBUG nova.virt.hardware [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1552.459444] env[62619]: DEBUG nova.virt.hardware [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1552.459587] env[62619]: DEBUG nova.virt.hardware [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1552.459807] env[62619]: DEBUG nova.virt.hardware [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1552.459988] env[62619]: DEBUG nova.virt.hardware [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1552.460174] env[62619]: DEBUG nova.virt.hardware [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1552.460338] env[62619]: DEBUG nova.virt.hardware [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1552.460503] env[62619]: DEBUG nova.virt.hardware [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1552.462885] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b48335f-c64b-4313-a8dc-83d243d4dc07 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.485264] env[62619]: DEBUG nova.virt.hardware [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1552.485264] env[62619]: DEBUG nova.virt.hardware [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1552.485436] 
env[62619]: DEBUG nova.virt.hardware [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1552.485511] env[62619]: DEBUG nova.virt.hardware [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1552.485656] env[62619]: DEBUG nova.virt.hardware [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1552.485797] env[62619]: DEBUG nova.virt.hardware [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1552.486886] env[62619]: DEBUG nova.virt.hardware [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1552.486886] env[62619]: DEBUG nova.virt.hardware [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1552.486886] env[62619]: DEBUG nova.virt.hardware [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1552.486886] env[62619]: DEBUG nova.virt.hardware [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1552.486886] env[62619]: DEBUG nova.virt.hardware [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1552.487985] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2affcb34-2e3b-4756-98c8-9df62a622cf6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.492295] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] 
[instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:35:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16e15a36-a55b-4c27-b864-f284339009d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:73:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69cfa7ba-6989-4d75-9495-97b5fea00c3c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4603d400-5a47-4629-9915-a5315eeca473', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1552.504423] env[62619]: DEBUG oslo.service.loopingcall [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1552.505949] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528c8181-15a6-4dc2-af49-b825a4427380 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.510036] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1552.510036] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ca22d89-848e-4d22-a51d-8ae7dfc3d46b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.529221] env[62619]: DEBUG oslo_vmware.rw_handles [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa7a12-0d17-59ca-3e82-187cb33023ff/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1552.538422] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b905186-53b3-4f2b-99d3-3b671ddb7072 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.541409] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:19:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6faf342-2332-4eee-bdde-dafce4f0a856', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1552.548981] env[62619]: DEBUG oslo.service.loopingcall [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1552.551667] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1552.552279] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9936be1-af59-4318-aba4-063cc2c05ab0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.567509] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cbd3c26-d2f0-41b9-a688-73860090b435 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.574088] env[62619]: DEBUG oslo_vmware.rw_handles [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa7a12-0d17-59ca-3e82-187cb33023ff/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1552.574266] env[62619]: ERROR oslo_vmware.rw_handles [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa7a12-0d17-59ca-3e82-187cb33023ff/disk-0.vmdk due to incomplete transfer. [ 1552.574869] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1552.574869] env[62619]: value = "task-1777674" [ 1552.574869] env[62619]: _type = "Task" [ 1552.574869] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.575692] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-70fe783c-96ec-4b22-93d6-5b9c8f4fe387 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.589210] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1552.589210] env[62619]: value = "task-1777675" [ 1552.589210] env[62619]: _type = "Task" [ 1552.589210] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.595350] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777674, 'name': CreateVM_Task} progress is 15%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.600703] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777675, 'name': CreateVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.603394] env[62619]: DEBUG oslo_vmware.rw_handles [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa7a12-0d17-59ca-3e82-187cb33023ff/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1552.603621] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Uploaded image 88432144-700d-4829-a1f6-4d35530dfc87 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1552.605882] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1552.606161] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-bc4a7d58-9543-41c7-876e-f14114af45d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.613637] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1552.613637] env[62619]: value = "task-1777676" [ 1552.613637] env[62619]: _type = "Task" [ 1552.613637] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.622042] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777676, 'name': Destroy_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.670646] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.530s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.671544] env[62619]: DEBUG nova.compute.manager [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1552.674712] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.534s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.675143] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.677748] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.098s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.680187] env[62619]: INFO nova.compute.claims [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1552.711619] env[62619]: INFO nova.scheduler.client.report [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Deleted allocations for instance 40eeb844-7423-4818-8095-81062c7e6392 [ 1552.745317] env[62619]: DEBUG nova.network.neutron [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1552.854374] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777673, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.904904] env[62619]: DEBUG nova.objects.instance [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Lazy-loading 'flavor' on Instance uuid ed34ae20-a891-45aa-8124-f36f264937f8 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1552.925093] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777672, 'name': ReconfigVM_Task, 'duration_secs': 0.549835} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.925384] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 0a80942c-eb86-480b-ab7b-33112dd90d28/0a80942c-eb86-480b-ab7b-33112dd90d28.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1552.926326] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ae7193da-3d5b-4a6c-9dec-5b8c23c3cee7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.934613] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1552.934613] env[62619]: value = "task-1777677" [ 1552.934613] env[62619]: _type = "Task" [ 1552.934613] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.944191] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777677, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.087415] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777674, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.094922] env[62619]: DEBUG nova.network.neutron [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Updating instance_info_cache with network_info: [{"id": "35590c26-21c7-47f0-9a37-848ba413367f", "address": "fa:16:3e:62:0f:c7", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35590c26-21", "ovs_interfaceid": "35590c26-21c7-47f0-9a37-848ba413367f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1553.103131] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777675, 'name': CreateVM_Task, 'duration_secs': 0.47464} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.103396] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1553.104060] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.104234] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.104597] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1553.104898] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3d8fb61-a9a9-4946-a644-5243bba2611e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.111158] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1553.111158] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52859019-413f-da20-29d4-b95b4aa6152f" [ 1553.111158] env[62619]: _type = "Task" [ 1553.111158] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.124651] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52859019-413f-da20-29d4-b95b4aa6152f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.127848] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777676, 'name': Destroy_Task, 'duration_secs': 0.501969} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.128018] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Destroyed the VM [ 1553.128281] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1553.129152] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a0440e62-09ca-468a-a4f5-2a2b29dde1bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.134526] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1553.134526] env[62619]: value = "task-1777678" [ 1553.134526] env[62619]: _type = "Task" [ 1553.134526] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.143358] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777678, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.187356] env[62619]: DEBUG nova.compute.utils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1553.191199] env[62619]: DEBUG nova.compute.manager [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1553.194571] env[62619]: DEBUG nova.network.neutron [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1553.226897] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a944e78d-cd66-4ad0-811d-3d95afe2a183 tempest-ServersTestManualDisk-1677364937 tempest-ServersTestManualDisk-1677364937-project-member] Lock "40eeb844-7423-4818-8095-81062c7e6392" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.314s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.248934] env[62619]: DEBUG nova.policy [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25159d73422b45dbbe4bab2b2a835055', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df50ba9d97ac4c059077c87f9cfdb719', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1553.351855] env[62619]: DEBUG oslo_vmware.api [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777673, 'name': PowerOnVM_Task, 'duration_secs': 0.766817} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.352255] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1553.352730] env[62619]: INFO nova.compute.manager [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Took 11.26 seconds to spawn the instance on the hypervisor. 
[ 1553.352945] env[62619]: DEBUG nova.compute.manager [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1553.354230] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-802b78e3-a744-4d32-92be-5ea55617bff6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.413421] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e519c3be-75c6-418c-be57-8e9f6b098ff4 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Lock "ed34ae20-a891-45aa-8124-f36f264937f8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.856s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.414193] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "6cd2f6e6-79a4-41be-a349-b504028ecab4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.414586] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "6cd2f6e6-79a4-41be-a349-b504028ecab4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.452532] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777677, 'name': Rename_Task, 'duration_secs': 0.235895} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.452835] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1553.454069] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92ab5254-e93f-4040-a6b0-ecf7fb59d483 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.461219] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1553.461219] env[62619]: value = "task-1777679" [ 1553.461219] env[62619]: _type = "Task" [ 1553.461219] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.472162] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777679, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.590316] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777674, 'name': CreateVM_Task, 'duration_secs': 0.559966} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.590316] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1553.590316] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.598053] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Releasing lock "refresh_cache-fab3d689-9e30-4afd-b0cc-49c6d2870c50" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.598502] env[62619]: DEBUG nova.compute.manager [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Instance network_info: |[{"id": "35590c26-21c7-47f0-9a37-848ba413367f", "address": "fa:16:3e:62:0f:c7", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35590c26-21", "ovs_interfaceid": "35590c26-21c7-47f0-9a37-848ba413367f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1553.599211] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:0f:c7', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35590c26-21c7-47f0-9a37-848ba413367f', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1553.607047] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Creating folder: Project (9d72c95915b94026bd402b05f067fecb). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1553.607399] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10a3e5d9-06ac-43da-90f9-d2e79656fc72 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.621016] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52859019-413f-da20-29d4-b95b4aa6152f, 'name': SearchDatastore_Task, 'duration_secs': 0.016558} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.622157] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.623213] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1553.623213] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.623213] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.623213] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1553.623489] env[62619]: INFO nova.virt.vmwareapi.vm_util [None 
req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Created folder: Project (9d72c95915b94026bd402b05f067fecb) in parent group-v368875. [ 1553.623529] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Creating folder: Instances. Parent ref: group-v369034. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1553.623809] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.624159] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1553.624450] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b023d98-97b4-4ccf-9af3-39f9583f2da8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.626389] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-061af298-5c64-4c9e-a845-a483567d4211 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.628014] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92d020d7-4451-4fd5-8d57-af44cffb2bd9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.633581] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1553.633581] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d35c17-af64-1979-ac66-210a7e0a07b4" [ 1553.633581] env[62619]: _type = "Task" [ 1553.633581] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.639442] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1553.639675] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1553.640555] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Created folder: Instances in parent group-v369034. [ 1553.640829] env[62619]: DEBUG oslo.service.loopingcall [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1553.644192] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59283ea7-0cb1-4b87-a738-dc1e8d149051 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.646761] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1553.649844] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bcc71daa-ced1-4709-a946-d6b0e1e73fa7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.665641] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d35c17-af64-1979-ac66-210a7e0a07b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.675267] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1553.675267] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52806384-19ef-04da-f9a8-d830f33ce043" [ 1553.675267] env[62619]: _type = "Task" [ 1553.675267] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.676106] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777678, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.677468] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1553.677468] env[62619]: value = "task-1777682" [ 1553.677468] env[62619]: _type = "Task" [ 1553.677468] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.691386] env[62619]: DEBUG nova.compute.manager [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1553.694520] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777682, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.701828] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52806384-19ef-04da-f9a8-d830f33ce043, 'name': SearchDatastore_Task, 'duration_secs': 0.018133} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.702966] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22abb6ba-92e6-4a6c-8890-0a1f067629a1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.711386] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1553.711386] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52831fdd-65d4-cd99-bff2-11a53b437013" [ 1553.711386] env[62619]: _type = "Task" [ 1553.711386] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.718866] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52831fdd-65d4-cd99-bff2-11a53b437013, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.788014] env[62619]: DEBUG nova.network.neutron [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Successfully created port: 52679af0-12c1-41ec-927a-590d8d45fce0 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1553.876047] env[62619]: INFO nova.compute.manager [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Took 50.49 seconds to build instance. [ 1553.916810] env[62619]: DEBUG nova.compute.manager [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1553.931874] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.932296] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.977666] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777679, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.146246] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d35c17-af64-1979-ac66-210a7e0a07b4, 'name': SearchDatastore_Task, 'duration_secs': 0.029237} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.149188] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1554.149432] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1554.149669] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.155312] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777678, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.164256] env[62619]: DEBUG nova.compute.manager [req-cbdbc070-0d8f-4112-9b25-fc0c4c924699 req-6c9f7dcb-27d7-4647-84ca-59ea5df9aba3 service nova] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Received event network-vif-plugged-535fa511-22ab-4762-80f3-e92464fddeb5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1554.164464] env[62619]: DEBUG oslo_concurrency.lockutils [req-cbdbc070-0d8f-4112-9b25-fc0c4c924699 req-6c9f7dcb-27d7-4647-84ca-59ea5df9aba3 service nova] Acquiring lock "0a80942c-eb86-480b-ab7b-33112dd90d28-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.165249] env[62619]: DEBUG oslo_concurrency.lockutils [req-cbdbc070-0d8f-4112-9b25-fc0c4c924699 req-6c9f7dcb-27d7-4647-84ca-59ea5df9aba3 service nova] Lock "0a80942c-eb86-480b-ab7b-33112dd90d28-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.165249] env[62619]: DEBUG oslo_concurrency.lockutils [req-cbdbc070-0d8f-4112-9b25-fc0c4c924699 req-6c9f7dcb-27d7-4647-84ca-59ea5df9aba3 service nova] Lock "0a80942c-eb86-480b-ab7b-33112dd90d28-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.165249] env[62619]: DEBUG nova.compute.manager [req-cbdbc070-0d8f-4112-9b25-fc0c4c924699 req-6c9f7dcb-27d7-4647-84ca-59ea5df9aba3 service nova] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] No waiting events found dispatching network-vif-plugged-535fa511-22ab-4762-80f3-e92464fddeb5 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1554.165249] env[62619]: WARNING nova.compute.manager [req-cbdbc070-0d8f-4112-9b25-fc0c4c924699 req-6c9f7dcb-27d7-4647-84ca-59ea5df9aba3 service nova] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Received unexpected event network-vif-plugged-535fa511-22ab-4762-80f3-e92464fddeb5 for instance with vm_state building and task_state spawning. [ 1554.165805] env[62619]: DEBUG nova.compute.manager [req-cbdbc070-0d8f-4112-9b25-fc0c4c924699 req-6c9f7dcb-27d7-4647-84ca-59ea5df9aba3 service nova] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Received event network-changed-535fa511-22ab-4762-80f3-e92464fddeb5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1554.165805] env[62619]: DEBUG nova.compute.manager [req-cbdbc070-0d8f-4112-9b25-fc0c4c924699 req-6c9f7dcb-27d7-4647-84ca-59ea5df9aba3 service nova] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Refreshing instance network info cache due to event network-changed-535fa511-22ab-4762-80f3-e92464fddeb5. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1554.165972] env[62619]: DEBUG oslo_concurrency.lockutils [req-cbdbc070-0d8f-4112-9b25-fc0c4c924699 req-6c9f7dcb-27d7-4647-84ca-59ea5df9aba3 service nova] Acquiring lock "refresh_cache-0a80942c-eb86-480b-ab7b-33112dd90d28" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.165972] env[62619]: DEBUG oslo_concurrency.lockutils [req-cbdbc070-0d8f-4112-9b25-fc0c4c924699 req-6c9f7dcb-27d7-4647-84ca-59ea5df9aba3 service nova] Acquired lock "refresh_cache-0a80942c-eb86-480b-ab7b-33112dd90d28" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.166208] env[62619]: DEBUG nova.network.neutron [req-cbdbc070-0d8f-4112-9b25-fc0c4c924699 req-6c9f7dcb-27d7-4647-84ca-59ea5df9aba3 service nova] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Refreshing network info cache for port 535fa511-22ab-4762-80f3-e92464fddeb5 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1554.188637] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15f5a7a-fc35-49d1-9be0-5bdec8f54dda {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.198919] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777682, 'name': CreateVM_Task, 'duration_secs': 0.455937} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.203021] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1554.203021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.203021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.203021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1554.203021] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-233bcaa4-4221-4ab3-bd1b-3ff9bdf14274 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.206627] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8ad346-8253-4ba3-85d2-e9b951254282 {{(pid=62619) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.211527] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Waiting for the task: (returnval){ [ 1554.211527] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a7e562-c2bd-5b62-4d0f-236307b33492" [ 1554.211527] env[62619]: _type = "Task" [ 1554.211527] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.247582] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd444592-ff97-499d-a397-cd0732618044 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.257266] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52831fdd-65d4-cd99-bff2-11a53b437013, 'name': SearchDatastore_Task, 'duration_secs': 0.015473} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.257495] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a7e562-c2bd-5b62-4d0f-236307b33492, 'name': SearchDatastore_Task, 'duration_secs': 0.018084} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.259010] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1554.259269] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ac03bcf3-61df-4557-8018-0ad54ef30f17/ac03bcf3-61df-4557-8018-0ad54ef30f17.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1554.259579] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1554.259833] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c 
{{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1554.259958] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.260109] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.260275] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1554.267180] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20f5eb10-ded9-4b12-be18-45c3c54efa2c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.271558] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2fa3dd4-14ea-487e-bce1-6531b96f5ec8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.275401] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25baf52a-8904-4ffb-b5d4-b3764312f3fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.292420] env[62619]: DEBUG nova.compute.provider_tree [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1554.294605] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1554.295086] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1554.295615] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1554.295615] env[62619]: value = "task-1777683" [ 1554.295615] env[62619]: _type = "Task" [ 1554.295615] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.296302] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c7a9ba5-297f-4078-9561-0ce7a5a252da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.306055] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Waiting for the task: (returnval){ [ 1554.306055] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e5cc9c-44a6-aa40-5c34-9143b789fa92" [ 1554.306055] env[62619]: _type = "Task" [ 1554.306055] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.309683] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777683, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.318225] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e5cc9c-44a6-aa40-5c34-9143b789fa92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.376680] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2299f3db-4242-4d1f-b2b4-babeeadf1783 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Lock "79dfeb2b-06d0-45f1-b97e-10fa4f00d282" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.567s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.436986] env[62619]: DEBUG nova.compute.manager [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1554.440387] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.473756] env[62619]: DEBUG oslo_vmware.api [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777679, 'name': PowerOnVM_Task, 'duration_secs': 0.628466} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.473756] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1554.473954] env[62619]: INFO nova.compute.manager [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Took 9.72 seconds to spawn the instance on the hypervisor. [ 1554.474148] env[62619]: DEBUG nova.compute.manager [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1554.474926] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e87941-631d-4722-b4d4-fedb34122ccd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.652590] env[62619]: DEBUG oslo_vmware.api [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777678, 'name': RemoveSnapshot_Task, 'duration_secs': 1.268081} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.653032] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1554.653385] env[62619]: INFO nova.compute.manager [None req-445751f6-3748-4fa7-9630-6c42c0dc3996 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Took 17.38 seconds to snapshot the instance on the hypervisor. [ 1554.709019] env[62619]: DEBUG nova.compute.manager [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1554.737194] env[62619]: DEBUG nova.virt.hardware [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1554.737194] env[62619]: DEBUG nova.virt.hardware [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1554.737194] env[62619]: DEBUG nova.virt.hardware [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1554.737194] env[62619]: DEBUG nova.virt.hardware [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1554.737194] env[62619]: DEBUG nova.virt.hardware [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1554.737194] env[62619]: DEBUG nova.virt.hardware [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1554.738300] env[62619]: DEBUG nova.virt.hardware [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1554.738639] env[62619]: DEBUG nova.virt.hardware [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1554.738943] 
env[62619]: DEBUG nova.virt.hardware [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1554.739299] env[62619]: DEBUG nova.virt.hardware [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1554.739792] env[62619]: DEBUG nova.virt.hardware [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1554.742071] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d80e493-76ec-48c7-ab3d-ddac0e743da7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.755496] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa70f139-9509-49d9-80a7-fdf0a1bc4f50 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.782954] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "fb231b38-950e-4c86-bfe5-4c10a304910f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.782954] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "fb231b38-950e-4c86-bfe5-4c10a304910f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.783251] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "fb231b38-950e-4c86-bfe5-4c10a304910f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.783411] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "fb231b38-950e-4c86-bfe5-4c10a304910f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.783517] env[62619]: DEBUG oslo_concurrency.lockutils 
[None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "fb231b38-950e-4c86-bfe5-4c10a304910f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.786176] env[62619]: INFO nova.compute.manager [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Terminating instance [ 1554.798227] env[62619]: DEBUG nova.scheduler.client.report [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1554.814057] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777683, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.828541] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e5cc9c-44a6-aa40-5c34-9143b789fa92, 'name': SearchDatastore_Task, 'duration_secs': 0.019181} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.829754] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfd26c9a-8ee7-49cf-bca3-004b8195e001 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.838243] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Waiting for the task: (returnval){ [ 1554.838243] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e27776-eac9-dcad-e225-e8e56aa9c258" [ 1554.838243] env[62619]: _type = "Task" [ 1554.838243] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.848956] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e27776-eac9-dcad-e225-e8e56aa9c258, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.866944] env[62619]: DEBUG oslo_vmware.rw_handles [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521d0e36-df5a-1109-9df4-bd2ac6551d4f/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1554.868283] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5afd7876-0031-4e92-aebf-74cc425e4bfb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.876480] env[62619]: DEBUG oslo_vmware.rw_handles [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521d0e36-df5a-1109-9df4-bd2ac6551d4f/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1554.876795] env[62619]: ERROR oslo_vmware.rw_handles [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521d0e36-df5a-1109-9df4-bd2ac6551d4f/disk-0.vmdk due to incomplete transfer. [ 1554.876840] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-397b21e8-6392-4fec-9186-a2bb65447fb6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.884838] env[62619]: DEBUG oslo_vmware.rw_handles [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521d0e36-df5a-1109-9df4-bd2ac6551d4f/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1554.885092] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Uploaded image cce1f626-0c24-4d2e-b753-64401418a36e to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1554.886893] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1554.887383] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cb368dd5-a64f-4abd-9d5b-a89a46e17064 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.893565] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1554.893565] env[62619]: value = "task-1777684" [ 1554.893565] env[62619]: _type = "Task" [ 1554.893565] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.903748] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777684, 'name': Destroy_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.964895] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.992492] env[62619]: INFO nova.compute.manager [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Took 50.70 seconds to build instance. 
[ 1555.170833] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Acquiring lock "4b2e9965-cbd4-4d98-b003-436b4a8c913e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.171167] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Lock "4b2e9965-cbd4-4d98-b003-436b4a8c913e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.173552] env[62619]: DEBUG nova.network.neutron [req-cbdbc070-0d8f-4112-9b25-fc0c4c924699 req-6c9f7dcb-27d7-4647-84ca-59ea5df9aba3 service nova] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Updated VIF entry in instance network info cache for port 535fa511-22ab-4762-80f3-e92464fddeb5. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1555.174086] env[62619]: DEBUG nova.network.neutron [req-cbdbc070-0d8f-4112-9b25-fc0c4c924699 req-6c9f7dcb-27d7-4647-84ca-59ea5df9aba3 service nova] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Updating instance_info_cache with network_info: [{"id": "535fa511-22ab-4762-80f3-e92464fddeb5", "address": "fa:16:3e:41:e6:05", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap535fa511-22", "ovs_interfaceid": "535fa511-22ab-4762-80f3-e92464fddeb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.290704] env[62619]: DEBUG nova.compute.manager [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1555.290965] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1555.291883] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4398670e-6a26-46ca-bafe-46390e61497a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.300297] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1555.300649] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11466405-7168-48e0-b362-cc8f9032f3c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.303077] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.303495] env[62619]: DEBUG nova.compute.manager [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1555.310015] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.611s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.310320] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.312297] env[62619]: DEBUG oslo_concurrency.lockutils [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.141s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.312663] env[62619]: DEBUG oslo_concurrency.lockutils [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.314321] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.010s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.314649] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.316694] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.929s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.318198] env[62619]: INFO nova.compute.claims [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1555.327946] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 
tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777683, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.796176} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.328943] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ac03bcf3-61df-4557-8018-0ad54ef30f17/ac03bcf3-61df-4557-8018-0ad54ef30f17.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1555.329332] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1555.329760] env[62619]: DEBUG oslo_vmware.api [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1555.329760] env[62619]: value = "task-1777685" [ 1555.329760] env[62619]: _type = "Task" [ 1555.329760] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.330072] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fae62136-3ac5-4e95-adda-5db33f60de12 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.340220] env[62619]: DEBUG oslo_vmware.api [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777685, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.341789] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1555.341789] env[62619]: value = "task-1777686" [ 1555.341789] env[62619]: _type = "Task" [ 1555.341789] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.356186] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e27776-eac9-dcad-e225-e8e56aa9c258, 'name': SearchDatastore_Task, 'duration_secs': 0.058329} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.360197] env[62619]: INFO nova.scheduler.client.report [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Deleted allocations for instance a802534f-1766-4ea9-9188-803ef197d775 [ 1555.361973] env[62619]: INFO nova.scheduler.client.report [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Deleted allocations for instance ca5f5f6b-5303-4af4-adaa-e4aac72a90f8 [ 1555.366471] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.366828] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] fab3d689-9e30-4afd-b0cc-49c6d2870c50/fab3d689-9e30-4afd-b0cc-49c6d2870c50.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1555.367339] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777686, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.370181] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1555.371047] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1555.371047] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03baac19-a912-41af-b83a-395c8dce7364 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.373347] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93f266ac-a3db-4a57-a7c8-868d69631dce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.377632] env[62619]: INFO nova.scheduler.client.report [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Deleted allocations for instance 78c7a111-d497-4114-b4f4-07319e6e7df2 [ 1555.385431] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1555.385720] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1555.387392] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Waiting for the task: (returnval){ [ 1555.387392] env[62619]: value = "task-1777687" [ 1555.387392] env[62619]: _type = "Task" [ 1555.387392] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.388274] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5349da7-7864-45a5-8e8f-bc7a7128d23d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.400554] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1555.400554] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52db779d-8c3e-8309-8088-5c155dd532e7" [ 1555.400554] env[62619]: _type = "Task" [ 1555.400554] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.405500] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777687, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.412242] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777684, 'name': Destroy_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.417389] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52db779d-8c3e-8309-8088-5c155dd532e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.495690] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de597a29-15f4-44ec-94b6-5cb039e307e2 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "0a80942c-eb86-480b-ab7b-33112dd90d28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.541s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.676443] env[62619]: DEBUG oslo_concurrency.lockutils [req-cbdbc070-0d8f-4112-9b25-fc0c4c924699 req-6c9f7dcb-27d7-4647-84ca-59ea5df9aba3 service nova] Releasing lock "refresh_cache-0a80942c-eb86-480b-ab7b-33112dd90d28" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.677092] env[62619]: DEBUG nova.compute.manager [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1555.809082] env[62619]: DEBUG nova.compute.utils [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1555.810552] env[62619]: DEBUG nova.compute.manager [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1555.810749] env[62619]: DEBUG nova.network.neutron [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1555.846153] env[62619]: DEBUG oslo_vmware.api [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777685, 'name': PowerOffVM_Task, 'duration_secs': 0.428522} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.849487] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1555.849792] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1555.850968] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d01a9c7-66c5-4a8b-a044-f62a4d73b230 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.858223] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777686, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.222751} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.858557] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1555.859757] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37d5a37-b078-432f-ba63-b1b507e4dabf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.889219] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] ac03bcf3-61df-4557-8018-0ad54ef30f17/ac03bcf3-61df-4557-8018-0ad54ef30f17.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1555.889219] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e5e47f15-c19a-4476-941e-377d5b8808dc tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "ca5f5f6b-5303-4af4-adaa-e4aac72a90f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.440s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.890277] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3604c40e-f2d4-4425-a044-ed84c6998f54 tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "a802534f-1766-4ea9-9188-803ef197d775" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.712s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.894147] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85fd68c6-ffe7-4d96-9ada-6e99fdfec1cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.909495] env[62619]: DEBUG oslo_concurrency.lockutils [None req-18a690ec-980f-4e1e-a2ea-1006eaa98e01 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "78c7a111-d497-4114-b4f4-07319e6e7df2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.396s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.917712] env[62619]: DEBUG nova.policy [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a13900f0213424aaa6d2c9fa1dc3df4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3be1c72bfb5c4cfd9c45ab817384e11b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 
'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1555.932505] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1555.932776] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1555.932997] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Deleting the datastore file [datastore1] fb231b38-950e-4c86-bfe5-4c10a304910f {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1555.934518] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1a05bf9-603c-4945-80e9-a91030b41508 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.936663] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1555.936663] env[62619]: value = "task-1777689" [ 1555.936663] env[62619]: _type = "Task" [ 1555.936663] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.948138] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777684, 'name': Destroy_Task, 'duration_secs': 1.012573} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.948414] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777687, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.952442] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Destroyed the VM [ 1555.952702] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1555.957933] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-56dd69a2-8f96-4812-b783-0f3031bd4f08 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.960291] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52db779d-8c3e-8309-8088-5c155dd532e7, 'name': SearchDatastore_Task, 'duration_secs': 0.021602} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.960631] env[62619]: DEBUG oslo_vmware.api [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1555.960631] env[62619]: value = "task-1777690" [ 1555.960631] env[62619]: _type = "Task" [ 1555.960631] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.962196] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e506ccff-8d89-4c08-b0f2-9ef357c87396 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.972449] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1555.972449] env[62619]: value = "task-1777691" [ 1555.972449] env[62619]: _type = "Task" [ 1555.972449] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.972647] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777689, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.982078] env[62619]: DEBUG oslo_vmware.api [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777690, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.982457] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1555.982457] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525b4ffb-74ed-3ac7-7eb7-6bd3e65769b1" [ 1555.982457] env[62619]: _type = "Task" [ 1555.982457] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.989766] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777691, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.996179] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525b4ffb-74ed-3ac7-7eb7-6bd3e65769b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.199260] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.240366] env[62619]: DEBUG nova.network.neutron [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Successfully updated port: 52679af0-12c1-41ec-927a-590d8d45fce0 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1556.317098] env[62619]: DEBUG nova.compute.manager [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1556.423783] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777687, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722175} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.424250] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] fab3d689-9e30-4afd-b0cc-49c6d2870c50/fab3d689-9e30-4afd-b0cc-49c6d2870c50.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1556.426271] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1556.426271] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81fcd83e-fda4-43e5-be5d-5e7104347042 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.433618] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Waiting for the task: (returnval){ [ 1556.433618] env[62619]: value = "task-1777692" [ 1556.433618] env[62619]: _type = "Task" [ 1556.433618] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.447980] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777692, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.462199] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777689, 'name': ReconfigVM_Task, 'duration_secs': 0.366941} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.462471] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Reconfigured VM instance instance-00000006 to attach disk [datastore1] ac03bcf3-61df-4557-8018-0ad54ef30f17/ac03bcf3-61df-4557-8018-0ad54ef30f17.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1556.467482] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6707715d-bf53-4ac2-964e-fb6848dd1fe5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.476814] env[62619]: DEBUG oslo_vmware.api [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777690, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.388415} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.481748] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1556.482218] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1556.482437] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1556.482789] env[62619]: INFO nova.compute.manager [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1556.483131] env[62619]: DEBUG oslo.service.loopingcall [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1556.483529] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1556.483529] env[62619]: value = "task-1777693" [ 1556.483529] env[62619]: _type = "Task" [ 1556.483529] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.486316] env[62619]: DEBUG nova.compute.manager [-] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1556.486413] env[62619]: DEBUG nova.network.neutron [-] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1556.499222] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777691, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.507980] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525b4ffb-74ed-3ac7-7eb7-6bd3e65769b1, 'name': SearchDatastore_Task, 'duration_secs': 0.066472} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.517666] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.517990] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 1f86b805-0fde-4bda-9a94-d440a670e23c/1f86b805-0fde-4bda-9a94-d440a670e23c.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1556.518374] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777693, 'name': Rename_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.518766] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1d182a5-da86-49dd-9dcc-6914f5383851 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.526084] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1556.526084] env[62619]: value = "task-1777694" [ 1556.526084] env[62619]: _type = "Task" [ 1556.526084] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.535425] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777694, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.577994] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "a6ba8114-0261-4894-98c0-9e0360f6d256" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.578627] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "a6ba8114-0261-4894-98c0-9e0360f6d256" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.742384] env[62619]: DEBUG nova.network.neutron [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Successfully created port: 77589fc3-af02-4235-859e-fbf8a8322155 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1556.745097] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "refresh_cache-ae37cae9-c82e-4775-8a8f-6bbf9108b0bd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1556.745321] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "refresh_cache-ae37cae9-c82e-4775-8a8f-6bbf9108b0bd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1556.745531] env[62619]: DEBUG nova.network.neutron [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 
tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1556.822676] env[62619]: INFO nova.virt.block_device [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Booting with volume da513efd-d6e1-4db5-90d2-52a5be9aa233 at /dev/sda [ 1556.873495] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3390a74-f8cd-4189-bab1-ae4a46db89c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.885228] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6680399-2c7c-46c6-88b1-595b0a260c12 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.892071] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-761d6395-b56b-41ff-a63d-bfe9e59d3a66 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.926378] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac65bc2-06e8-4d3f-9a90-a7b04b0b0f93 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.934055] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed2b86b-b53a-412c-916c-7fbe581b7b02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.953596] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc10e55-5600-45b1-8547-c76ce6dcfde9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.978950] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777692, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113017} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.979696] env[62619]: DEBUG nova.compute.provider_tree [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1556.998762] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1557.008449] env[62619]: DEBUG nova.scheduler.client.report [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1557.013913] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62612162-84fe-4428-8d86-81f934153488 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.017309] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "ec56c824-5f9a-47bf-bcd6-e456ddaad2f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1557.017552] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "ec56c824-5f9a-47bf-bcd6-e456ddaad2f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1557.021575] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39d00157-2335-4acd-a464-51150e3711f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.059510] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] fab3d689-9e30-4afd-b0cc-49c6d2870c50/fab3d689-9e30-4afd-b0cc-49c6d2870c50.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1557.059968] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777693, 'name': Rename_Task, 'duration_secs': 0.156589} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.060649] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777691, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.061776] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba81c9ef-4856-478f-b6c0-cd188e351069 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.078852] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1557.082248] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1b876f-aaae-463b-8846-178b6b61f2e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.097982] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d45162b8-54f7-4849-8c5a-e80202d31738 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.099977] env[62619]: DEBUG nova.compute.manager [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1557.103152] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777694, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.109806] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1557.109806] env[62619]: value = "task-1777696" [ 1557.109806] env[62619]: _type = "Task" [ 1557.109806] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.110963] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Waiting for the task: (returnval){ [ 1557.110963] env[62619]: value = "task-1777695" [ 1557.110963] env[62619]: _type = "Task" [ 1557.110963] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.124638] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777695, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.147803] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777696, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.150298] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb69d16-876e-4f79-ba06-7b635685443c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.157297] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02508f6-864d-4ae6-a1b2-ccd50b3a6f2a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.172463] env[62619]: DEBUG nova.virt.block_device [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Updating existing volume attachment record: 0dcb6059-b07c-4a90-a6f4-c7f368ded7de {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1557.358585] env[62619]: DEBUG nova.network.neutron [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1557.491127] env[62619]: DEBUG oslo_vmware.api [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777691, 'name': RemoveSnapshot_Task, 'duration_secs': 1.233686} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.491799] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1557.494475] env[62619]: INFO nova.compute.manager [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Took 17.60 seconds to snapshot the instance on the hypervisor. 
[ 1557.526042] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.208s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.526042] env[62619]: DEBUG nova.compute.manager [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1557.534351] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.024s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1557.537229] env[62619]: INFO nova.compute.claims [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1557.557129] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777694, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.65739} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.557129] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 1f86b805-0fde-4bda-9a94-d440a670e23c/1f86b805-0fde-4bda-9a94-d440a670e23c.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1557.557129] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1557.557129] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d76a7c9a-08f7-4ec0-91d7-cb477b7dbef7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.565029] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1557.565029] env[62619]: value = "task-1777697" [ 1557.565029] env[62619]: _type = "Task" [ 1557.565029] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.574926] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777697, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.630777] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777696, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.635379] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777695, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.636463] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1557.830298] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Received event network-vif-deleted-5ce94831-5f9c-4556-936d-e71ee8d04f45 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1557.830700] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Received event network-vif-plugged-7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1557.831095] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Acquiring lock "1f86b805-0fde-4bda-9a94-d440a670e23c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1557.831275] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Lock "1f86b805-0fde-4bda-9a94-d440a670e23c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1557.831436] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Lock "1f86b805-0fde-4bda-9a94-d440a670e23c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.831666] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] No waiting events found dispatching network-vif-plugged-7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1557.831899] env[62619]: WARNING nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Received unexpected event network-vif-plugged-7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575 for instance with vm_state building and task_state spawning. [ 1557.832159] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Received event network-changed-7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1557.832331] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Refreshing instance network info cache due to event network-changed-7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1557.832687] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Acquiring lock "refresh_cache-1f86b805-0fde-4bda-9a94-d440a670e23c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.832874] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Acquired lock "refresh_cache-1f86b805-0fde-4bda-9a94-d440a670e23c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.833066] env[62619]: DEBUG nova.network.neutron [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Refreshing network info cache for port 7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1557.836512] env[62619]: DEBUG nova.network.neutron [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Updating instance_info_cache with network_info: [{"id": "52679af0-12c1-41ec-927a-590d8d45fce0", "address": "fa:16:3e:35:73:8e", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52679af0-12", "ovs_interfaceid": "52679af0-12c1-41ec-927a-590d8d45fce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.036922] env[62619]: DEBUG nova.compute.utils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1558.045858] env[62619]: DEBUG nova.compute.manager [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1558.045858] env[62619]: DEBUG nova.network.neutron [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1558.071401] env[62619]: DEBUG nova.compute.manager [None req-8fd7101c-07cf-42a7-a1ba-49d99507d645 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Found 1 images (rotation: 2) {{(pid=62619) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4924}} [ 1558.083023] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777697, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067794} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.083023] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1558.083023] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf08eff1-d204-4fa8-bacf-02e42948c3ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.108365] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 1f86b805-0fde-4bda-9a94-d440a670e23c/1f86b805-0fde-4bda-9a94-d440a670e23c.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1558.109553] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10461916-b1f7-481a-a5bb-31295a0cd372 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.140065] env[62619]: DEBUG oslo_vmware.api [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777696, 'name': PowerOnVM_Task, 'duration_secs': 0.747045} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.141543] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1558.141645] env[62619]: DEBUG nova.compute.manager [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1558.141894] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777695, 'name': ReconfigVM_Task, 'duration_secs': 0.577983} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.142158] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1558.142158] env[62619]: value = "task-1777698" [ 1558.142158] env[62619]: _type = "Task" [ 1558.142158] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.142859] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e3d439-33c9-489b-9e1d-12f7554167f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.145436] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Reconfigured VM instance instance-00000037 to attach disk [datastore1] fab3d689-9e30-4afd-b0cc-49c6d2870c50/fab3d689-9e30-4afd-b0cc-49c6d2870c50.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1558.146454] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4012682-469c-4ee2-9dfe-950a09a74fc5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.163025] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777698, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.163025] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Waiting for the task: (returnval){ [ 1558.163025] env[62619]: value = "task-1777699" [ 1558.163025] env[62619]: _type = "Task" [ 1558.163025] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.171314] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777699, 'name': Rename_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.341623] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "refresh_cache-ae37cae9-c82e-4775-8a8f-6bbf9108b0bd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.341940] env[62619]: DEBUG nova.compute.manager [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Instance network_info: |[{"id": "52679af0-12c1-41ec-927a-590d8d45fce0", "address": "fa:16:3e:35:73:8e", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52679af0-12", "ovs_interfaceid": "52679af0-12c1-41ec-927a-590d8d45fce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1558.342403] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:73:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52679af0-12c1-41ec-927a-590d8d45fce0', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1558.352462] env[62619]: DEBUG oslo.service.loopingcall [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1558.352698] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1558.352922] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97e035da-f48b-445d-a382-c9e0553c0d05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.380839] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1558.380839] env[62619]: value = "task-1777700" [ 1558.380839] env[62619]: _type = "Task" [ 1558.380839] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.386816] env[62619]: DEBUG nova.policy [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6beb0577d52454a82262ea46ab3e796', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf557954d79d4bb1939f6e65d4ed00b5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1558.393381] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777700, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.437219] env[62619]: DEBUG nova.network.neutron [-] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.543681] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Acquiring lock "ed34ae20-a891-45aa-8124-f36f264937f8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.544095] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Lock "ed34ae20-a891-45aa-8124-f36f264937f8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.549805] env[62619]: DEBUG nova.compute.manager [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1558.673643] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777698, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.680059] env[62619]: DEBUG oslo_concurrency.lockutils [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "7217d898-54ee-46ed-88fa-959c38e988e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.680312] env[62619]: DEBUG oslo_concurrency.lockutils [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "7217d898-54ee-46ed-88fa-959c38e988e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.680509] env[62619]: DEBUG oslo_concurrency.lockutils [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "7217d898-54ee-46ed-88fa-959c38e988e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.680701] env[62619]: DEBUG oslo_concurrency.lockutils [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "7217d898-54ee-46ed-88fa-959c38e988e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.680922] env[62619]: DEBUG oslo_concurrency.lockutils [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "7217d898-54ee-46ed-88fa-959c38e988e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.688110] env[62619]: INFO nova.compute.manager [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Terminating instance [ 1558.689550] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.698670] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 
tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777699, 'name': Rename_Task, 'duration_secs': 0.40855} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.701659] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1558.702630] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2bfbb68-baa7-420a-be67-9edbafabaf4e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.709569] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Waiting for the task: (returnval){ [ 1558.709569] env[62619]: value = "task-1777701" [ 1558.709569] env[62619]: _type = "Task" [ 1558.709569] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.721035] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777701, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.894391] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777700, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.941612] env[62619]: INFO nova.compute.manager [-] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Took 2.45 seconds to deallocate network for instance. [ 1559.056038] env[62619]: INFO nova.compute.manager [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Detaching volume afcc9063-7d91-4cc2-8215-be590afb876a [ 1559.108728] env[62619]: INFO nova.virt.block_device [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Attempting to driver detach volume afcc9063-7d91-4cc2-8215-be590afb876a from mountpoint /dev/sdb [ 1559.108960] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Volume detach. 
Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1559.109162] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369030', 'volume_id': 'afcc9063-7d91-4cc2-8215-be590afb876a', 'name': 'volume-afcc9063-7d91-4cc2-8215-be590afb876a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ed34ae20-a891-45aa-8124-f36f264937f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'afcc9063-7d91-4cc2-8215-be590afb876a', 'serial': 'afcc9063-7d91-4cc2-8215-be590afb876a'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1559.110370] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe2fc20-fc8a-4045-a18f-2344cf2e9c7b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.145857] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa15d76a-b2a5-460b-afef-ce75e77d9a33 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.150157] env[62619]: DEBUG nova.network.neutron [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Updated VIF entry in instance network info cache for port 7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1559.150545] env[62619]: DEBUG nova.network.neutron [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Updating instance_info_cache with network_info: [{"id": "7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575", "address": "fa:16:3e:fd:35:d7", "network": {"id": "65fab282-c9d0-4538-b853-bc118e26fddd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2136510104", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e9d4c42-dc", "ovs_interfaceid": "7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4603d400-5a47-4629-9915-a5315eeca473", "address": "fa:16:3e:f8:73:07", "network": {"id": "bc8f0b8b-aaf4-4e38-ad5a-c1e19b3f4a04", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-796388189", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.44", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4603d400-5a", "ovs_interfaceid": "4603d400-5a47-4629-9915-a5315eeca473", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.154896] env[62619]: DEBUG oslo_concurrency.lockutils [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "0272ca2a-e9ff-4af5-8120-278a82d74627" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.155145] env[62619]: DEBUG oslo_concurrency.lockutils [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "0272ca2a-e9ff-4af5-8120-278a82d74627" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.155755] env[62619]: DEBUG oslo_concurrency.lockutils [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "0272ca2a-e9ff-4af5-8120-278a82d74627-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.156147] env[62619]: DEBUG oslo_concurrency.lockutils [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "0272ca2a-e9ff-4af5-8120-278a82d74627-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.156672] env[62619]: DEBUG oslo_concurrency.lockutils [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "0272ca2a-e9ff-4af5-8120-278a82d74627-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1559.164184] env[62619]: INFO nova.compute.manager [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Terminating instance [ 1559.166213] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3971c8-bc96-40ff-ac9a-d76744583f12 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.177039] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777698, 'name': ReconfigVM_Task, 'duration_secs': 0.75548} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.178121] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 1f86b805-0fde-4bda-9a94-d440a670e23c/1f86b805-0fde-4bda-9a94-d440a670e23c.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1559.179040] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a691516-3e0f-4a5a-b5f0-fdd4804f69ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.198321] env[62619]: DEBUG oslo_concurrency.lockutils [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "refresh_cache-7217d898-54ee-46ed-88fa-959c38e988e7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.198594] env[62619]: DEBUG oslo_concurrency.lockutils [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquired lock "refresh_cache-7217d898-54ee-46ed-88fa-959c38e988e7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.198664] env[62619]: DEBUG nova.network.neutron [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1559.201937] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b04c3c-583f-4369-9043-a4288dec94be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.205627] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee08edbc-a9b6-4aad-89e4-1e59b9bb2273 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.210266] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1559.210266] env[62619]: value = "task-1777702" [ 1559.210266] env[62619]: _type = "Task" [ 1559.210266] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.230584] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] The volume has not been displaced from its original location: [datastore1] volume-afcc9063-7d91-4cc2-8215-be590afb876a/volume-afcc9063-7d91-4cc2-8215-be590afb876a.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1559.235866] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Reconfiguring VM instance instance-0000000b to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1559.239909] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce97eb18-a961-4d6e-961b-7d1f765ed085 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.253654] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1d00f8-ea03-4ae7-8c49-e2cbeaf91327 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.257033] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777701, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.260616] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777702, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.292223] env[62619]: DEBUG nova.compute.manager [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1559.292754] env[62619]: DEBUG nova.virt.hardware [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1559.292953] env[62619]: DEBUG nova.virt.hardware [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1559.293112] env[62619]: DEBUG nova.virt.hardware [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1559.293281] env[62619]: DEBUG nova.virt.hardware [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1559.293440] env[62619]: DEBUG nova.virt.hardware [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1559.293572] env[62619]: DEBUG nova.virt.hardware [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1559.293797] env[62619]: DEBUG nova.virt.hardware [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1559.294168] env[62619]: DEBUG nova.virt.hardware [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1559.294263] env[62619]: DEBUG nova.virt.hardware [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 
tempest-ServersTestBootFromVolume-1840168648-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1559.294372] env[62619]: DEBUG nova.virt.hardware [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1559.295122] env[62619]: DEBUG nova.virt.hardware [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1559.296716] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d797ce1-caf6-448e-8f61-b3d399c16173 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.299662] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fadc4d3b-e5a0-40f3-adba-646db076a9d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.302087] env[62619]: DEBUG oslo_vmware.api [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Waiting for the task: (returnval){ [ 1559.302087] env[62619]: value = "task-1777703" [ 1559.302087] env[62619]: _type = "Task" [ 1559.302087] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.311202] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510a52d7-50ed-40a3-b7c8-c1ff70a98c85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.315515] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9118f7-7db9-4df7-979a-76ebf31233e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.322117] env[62619]: DEBUG oslo_vmware.api [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Task: {'id': task-1777703, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.339581] env[62619]: DEBUG nova.compute.provider_tree [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1559.391628] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777700, 'name': CreateVM_Task, 'duration_secs': 0.51624} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.392055] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1559.392431] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.392583] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.392888] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1559.393156] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3da27e9-3ff6-459c-b993-72e07d5de226 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.397741] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1559.397741] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52269caa-f8e5-2bcc-3ce3-3967e5f2a5eb" [ 1559.397741] env[62619]: _type = "Task" [ 1559.397741] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.405471] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52269caa-f8e5-2bcc-3ce3-3967e5f2a5eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.454482] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.571066] env[62619]: DEBUG nova.network.neutron [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Successfully created port: a10c5399-b021-4ea7-8a41-4d58136aff12 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1559.574298] env[62619]: DEBUG nova.compute.manager [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1559.581307] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Acquiring lock "79dfeb2b-06d0-45f1-b97e-10fa4f00d282" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.581598] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Lock "79dfeb2b-06d0-45f1-b97e-10fa4f00d282" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.581770] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Acquiring lock "79dfeb2b-06d0-45f1-b97e-10fa4f00d282-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.582524] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Lock "79dfeb2b-06d0-45f1-b97e-10fa4f00d282-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.582524] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Lock "79dfeb2b-06d0-45f1-b97e-10fa4f00d282-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1559.586383] env[62619]: INFO nova.compute.manager [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Terminating instance [ 1559.604850] env[62619]: DEBUG nova.virt.hardware [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1559.607662] env[62619]: DEBUG nova.virt.hardware [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1559.607662] env[62619]: DEBUG nova.virt.hardware [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1559.607662] env[62619]: DEBUG nova.virt.hardware [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1559.607662] env[62619]: DEBUG nova.virt.hardware [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1559.607662] env[62619]: DEBUG nova.virt.hardware [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1559.607662] env[62619]: DEBUG nova.virt.hardware [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1559.607662] env[62619]: DEBUG nova.virt.hardware [None 
req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1559.607662] env[62619]: DEBUG nova.virt.hardware [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1559.607662] env[62619]: DEBUG nova.virt.hardware [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1559.607662] env[62619]: DEBUG nova.virt.hardware [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1559.608714] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e1fc5a-77f0-43d2-82f7-c5b2e4ff8f15 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.618312] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32aeed8-659e-4490-9ca9-5fafff7cc2af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.663745] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Releasing lock "refresh_cache-1f86b805-0fde-4bda-9a94-d440a670e23c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1559.664082] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Received event network-vif-plugged-4603d400-5a47-4629-9915-a5315eeca473 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1559.664290] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Acquiring lock "1f86b805-0fde-4bda-9a94-d440a670e23c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.664492] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Lock "1f86b805-0fde-4bda-9a94-d440a670e23c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.664650] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Lock "1f86b805-0fde-4bda-9a94-d440a670e23c-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1559.664845] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] No waiting events found dispatching network-vif-plugged-4603d400-5a47-4629-9915-a5315eeca473 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1559.665037] env[62619]: WARNING nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Received unexpected event network-vif-plugged-4603d400-5a47-4629-9915-a5315eeca473 for instance with vm_state building and task_state spawning. [ 1559.665208] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Received event network-changed-4603d400-5a47-4629-9915-a5315eeca473 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1559.665356] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Refreshing instance network info cache due to event network-changed-4603d400-5a47-4629-9915-a5315eeca473. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1559.665530] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Acquiring lock "refresh_cache-1f86b805-0fde-4bda-9a94-d440a670e23c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.665660] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Acquired lock "refresh_cache-1f86b805-0fde-4bda-9a94-d440a670e23c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.665815] env[62619]: DEBUG nova.network.neutron [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Refreshing network info cache for port 4603d400-5a47-4629-9915-a5315eeca473 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1559.678795] env[62619]: DEBUG nova.compute.manager [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1559.679017] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1559.680553] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc85c341-6db2-4240-b09e-f2539a68a4db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.694333] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1559.694333] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0908fa0e-54db-43a1-8298-cdff99f4fd29 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.698448] env[62619]: DEBUG oslo_vmware.api [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1559.698448] env[62619]: value = "task-1777704" [ 1559.698448] env[62619]: _type = "Task" [ 1559.698448] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.707211] env[62619]: DEBUG oslo_vmware.api [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777704, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.723009] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777702, 'name': Rename_Task, 'duration_secs': 0.279787} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.725637] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1559.725913] env[62619]: DEBUG oslo_vmware.api [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777701, 'name': PowerOnVM_Task, 'duration_secs': 0.851515} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.726146] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24097d82-2e3f-4018-ac81-9ff544aff66b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.727669] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1559.727913] env[62619]: INFO nova.compute.manager [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Took 8.58 seconds to spawn the instance on the hypervisor. [ 1559.728064] env[62619]: DEBUG nova.compute.manager [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1559.729319] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cefbe328-995d-4e26-812f-71cfa5224e5a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.739115] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1559.739115] env[62619]: value = "task-1777705" [ 1559.739115] env[62619]: _type = "Task" [ 1559.739115] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.750566] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777705, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.783747] env[62619]: DEBUG nova.network.neutron [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1559.813294] env[62619]: DEBUG oslo_vmware.api [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Task: {'id': task-1777703, 'name': ReconfigVM_Task, 'duration_secs': 0.357315} completed successfully. 
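[editor's aside] The "Invoking VirtualMachine.PowerOnVM_Task ... Waiting for the task ... progress is 33% ... completed successfully ... Powered on the VM" lines above are oslo.vmware's invoke-then-poll cycle. A minimal sketch of that cycle, assuming an already-created `session` (an `oslo_vmware.api.VMwareAPISession`) and an existing VirtualMachine managed-object reference `vm_ref`; the function name is illustrative, not taken from the log:

```python
# Illustrative sketch only: the invoke-then-poll pattern behind the task lines above.
# `session` and `vm_ref` are assumed to exist already.
def power_on_vm(session, vm_ref):
    # Logged as "Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-..."
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Logged as "Waiting for the task ..." / "Task: {...} progress is N%";
    # raises an oslo_vmware exception if the task ends in error.
    return session.wait_for_task(task)
```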
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.813731] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Reconfigured VM instance instance-0000000b to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1559.820188] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a187bc06-0a85-45fa-b15a-e3eed0269c3a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.842339] env[62619]: DEBUG nova.scheduler.client.report [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1559.847012] env[62619]: DEBUG oslo_vmware.api [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Waiting for the task: (returnval){ [ 1559.847012] env[62619]: value = "task-1777706" [ 1559.847012] env[62619]: _type = "Task" [ 1559.847012] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.865200] env[62619]: DEBUG oslo_vmware.api [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Task: {'id': task-1777706, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.908165] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52269caa-f8e5-2bcc-3ce3-3967e5f2a5eb, 'name': SearchDatastore_Task, 'duration_secs': 0.014305} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.908719] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1559.909478] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1559.909478] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.909478] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.909793] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1559.910128] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc208cc3-a612-45c8-9714-0b4fd8009176 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.921238] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1559.921444] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1559.925042] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e44c086-eeb7-4e7d-a030-a2450f0b6b2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.928816] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1559.928816] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b49057-4f87-591b-aa08-08ef3a561401" [ 1559.928816] env[62619]: _type = "Task" [ 1559.928816] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.937743] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b49057-4f87-591b-aa08-08ef3a561401, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.027627] env[62619]: DEBUG nova.network.neutron [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1560.090507] env[62619]: DEBUG nova.compute.manager [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1560.090812] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1560.091637] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ee1454-1000-4c5a-a60c-bb03cef0eb0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.100145] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1560.100145] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88ee981a-3ad5-4fd7-aafb-6f21601a486b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.106632] env[62619]: DEBUG oslo_vmware.api [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Waiting for the task: (returnval){ [ 1560.106632] env[62619]: value = "task-1777707" [ 1560.106632] env[62619]: _type = "Task" [ 1560.106632] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.125426] env[62619]: DEBUG oslo_vmware.api [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777707, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.147404] env[62619]: DEBUG nova.network.neutron [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Successfully updated port: 77589fc3-af02-4235-859e-fbf8a8322155 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1560.210387] env[62619]: DEBUG oslo_vmware.api [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777704, 'name': PowerOffVM_Task, 'duration_secs': 0.246855} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.210387] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1560.217425] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1560.217425] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0404fc80-d31f-441e-9f6b-e13c650b5c97 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.254730] env[62619]: INFO nova.compute.manager [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Took 42.66 seconds to build instance. [ 1560.263473] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777705, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.351024] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.815s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.351024] env[62619]: DEBUG nova.compute.manager [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1560.353086] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.009s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.353570] env[62619]: DEBUG nova.objects.instance [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Lazy-loading 'resources' on Instance uuid 39adf15c-f77e-4737-aeeb-258887007b9a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1560.365080] env[62619]: DEBUG oslo_vmware.api [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Task: {'id': task-1777706, 'name': ReconfigVM_Task, 'duration_secs': 0.161387} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.365569] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369030', 'volume_id': 'afcc9063-7d91-4cc2-8215-be590afb876a', 'name': 'volume-afcc9063-7d91-4cc2-8215-be590afb876a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ed34ae20-a891-45aa-8124-f36f264937f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'afcc9063-7d91-4cc2-8215-be590afb876a', 'serial': 'afcc9063-7d91-4cc2-8215-be590afb876a'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1560.440359] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b49057-4f87-591b-aa08-08ef3a561401, 'name': SearchDatastore_Task, 'duration_secs': 0.01201} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.441291] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cfb1d01-3269-4004-bf6e-1022287c7678 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.449295] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1560.449295] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529c38f6-604d-37be-3d08-3351126ae731" [ 1560.449295] env[62619]: _type = "Task" [ 1560.449295] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.457144] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529c38f6-604d-37be-3d08-3351126ae731, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.531125] env[62619]: DEBUG oslo_concurrency.lockutils [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Releasing lock "refresh_cache-7217d898-54ee-46ed-88fa-959c38e988e7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1560.531606] env[62619]: DEBUG nova.compute.manager [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1560.532669] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1560.533624] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca140c2-58e0-43ad-9831-6a2c83f282a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.542598] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1560.542856] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f32414c-9b2b-4edd-b02f-6c60c8707798 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.549257] env[62619]: DEBUG oslo_vmware.api [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1560.549257] env[62619]: value = "task-1777709" [ 1560.549257] env[62619]: _type = "Task" [ 1560.549257] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.558839] env[62619]: DEBUG oslo_vmware.api [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777709, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.626531] env[62619]: DEBUG oslo_concurrency.lockutils [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "0a80942c-eb86-480b-ab7b-33112dd90d28" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.626531] env[62619]: DEBUG oslo_concurrency.lockutils [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "0a80942c-eb86-480b-ab7b-33112dd90d28" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.626531] env[62619]: DEBUG oslo_concurrency.lockutils [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "0a80942c-eb86-480b-ab7b-33112dd90d28-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.626531] env[62619]: DEBUG oslo_concurrency.lockutils [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "0a80942c-eb86-480b-ab7b-33112dd90d28-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.626531] env[62619]: DEBUG oslo_concurrency.lockutils [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "0a80942c-eb86-480b-ab7b-33112dd90d28-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.628296] env[62619]: DEBUG oslo_vmware.api [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777707, 'name': PowerOffVM_Task, 'duration_secs': 0.20359} completed successfully. 
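[editor's aside] The repeated "Acquiring lock X by Y / Lock X acquired ... waited 0.000s / Lock X released ... held 0.000s" lines throughout this excerpt come from oslo.concurrency's lockutils wrappers. A minimal sketch of the two common forms; the lock names and functions below are hypothetical examples, not Nova code:

```python
# Illustrative sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock ... / ... acquired ... waited Ns / ... released ... held Ns" lines.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage_example():
    # Body runs with the named in-process lock held; the acquire/release
    # (and the waited/held timings) are logged by the wrapper.
    pass


def clear_instance_events_example(instance_uuid):
    # The same primitive as a context manager, e.g. the per-instance and
    # per-instance "-events" locks seen above.
    with lockutils.lock(instance_uuid + '-events'):
        pass
```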
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.636016] env[62619]: INFO nova.compute.manager [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Terminating instance [ 1560.638114] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1560.638383] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1560.644579] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7db526cc-5aa5-464f-86ad-8ff4f32409f7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.649951] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Acquiring lock "refresh_cache-b1c3c213-599d-4cab-8224-d87467d774c9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1560.650090] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Acquired lock "refresh_cache-b1c3c213-599d-4cab-8224-d87467d774c9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.650452] env[62619]: DEBUG nova.network.neutron [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1560.765112] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "a5e4c524-7cc8-4981-899e-1a7c80fac2bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.765112] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "a5e4c524-7cc8-4981-899e-1a7c80fac2bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.765112] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1b373c8-2ebc-4cba-b7b7-06714b0c93e8 
tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Lock "fab3d689-9e30-4afd-b0cc-49c6d2870c50" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.518s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.765112] env[62619]: DEBUG oslo_vmware.api [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777705, 'name': PowerOnVM_Task, 'duration_secs': 0.616964} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.766635] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1560.766860] env[62619]: INFO nova.compute.manager [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Took 13.27 seconds to spawn the instance on the hypervisor. [ 1560.767053] env[62619]: DEBUG nova.compute.manager [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1560.769943] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82a3df2-1112-40f0-bac0-0046ee7f70f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.799674] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1560.800255] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1560.801748] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Deleting the datastore file [datastore1] 79dfeb2b-06d0-45f1-b97e-10fa4f00d282 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1560.804868] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ae77416-a960-46af-a3f2-44263f059624 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.813616] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 
tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1560.813898] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1560.814103] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Deleting the datastore file [datastore1] 0272ca2a-e9ff-4af5-8120-278a82d74627 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1560.814382] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "0f925028-c376-438f-8a56-deaa23047199" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.814585] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "0f925028-c376-438f-8a56-deaa23047199" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.817083] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ec111ba-7ea7-4063-ac3b-e3d667ac9f9b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.821630] env[62619]: DEBUG oslo_vmware.api [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1560.821630] env[62619]: value = "task-1777712" [ 1560.821630] env[62619]: _type = "Task" [ 1560.821630] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.823543] env[62619]: DEBUG oslo_vmware.api [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Waiting for the task: (returnval){ [ 1560.823543] env[62619]: value = "task-1777711" [ 1560.823543] env[62619]: _type = "Task" [ 1560.823543] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.840534] env[62619]: DEBUG oslo_vmware.api [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777712, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.844913] env[62619]: DEBUG oslo_vmware.api [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777711, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.858439] env[62619]: DEBUG nova.compute.utils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1560.859877] env[62619]: DEBUG nova.compute.manager [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1560.859877] env[62619]: DEBUG nova.network.neutron [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1560.923693] env[62619]: DEBUG nova.policy [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c91b100cc8f94b93af086dafebe29092', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c24c9d49d8d4104a0868f126eb3a26e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1560.930772] env[62619]: DEBUG nova.objects.instance [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Lazy-loading 'flavor' on Instance uuid ed34ae20-a891-45aa-8124-f36f264937f8 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1560.960993] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529c38f6-604d-37be-3d08-3351126ae731, 'name': SearchDatastore_Task, 'duration_secs': 0.029344} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.963783] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1560.964066] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ae37cae9-c82e-4775-8a8f-6bbf9108b0bd/ae37cae9-c82e-4775-8a8f-6bbf9108b0bd.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1560.964529] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a002c597-ab39-4825-a4e4-d6ff29526304 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.972332] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1560.972332] env[62619]: value = "task-1777713" [ 1560.972332] env[62619]: _type = "Task" [ 1560.972332] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.984900] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777713, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.060467] env[62619]: DEBUG oslo_vmware.api [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777709, 'name': PowerOffVM_Task, 'duration_secs': 0.177612} completed successfully. 
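[editor's aside] The block above (acquire the image-cache lock, SearchDatastore_Task for the cached VMDK, MakeDirectory, then CopyVirtualDisk_Task into the instance folder) is the driver's cache-then-copy flow for the root disk. A rough sketch of the final copy step via oslo.vmware, assuming an existing `session` and datacenter reference `dc_ref`; the paths mirror the log but the function and defaults are illustrative:

```python
# Rough sketch of the CopyVirtualDisk_Task call seen above; `session` and `dc_ref`
# are assumed to exist, and the path layout is illustrative.
def copy_cached_disk(session, dc_ref, image_id, instance_uuid, datastore='datastore1'):
    src = '[%s] devstack-image-cache_base/%s/%s.vmdk' % (datastore, image_id, image_id)
    dst = '[%s] %s/%s.vmdk' % (datastore, instance_uuid, instance_uuid)
    disk_mgr = session.vim.service_content.virtualDiskManager
    # Logged as "Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=..."
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src, sourceDatacenter=dc_ref,
                              destName=dst, destDatacenter=dc_ref)
    # Polled as "Task: {'id': task-..., 'name': CopyVirtualDisk_Task} progress is N%".
    session.wait_for_task(task)
```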
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.060467] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1561.060467] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1561.060467] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b98fb2f-7aa4-44b2-a69e-0de5aab1f75b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.087022] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1561.087022] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1561.087113] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Deleting the datastore file [datastore1] 7217d898-54ee-46ed-88fa-959c38e988e7 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1561.087301] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b63d76d2-bd3d-4569-917f-3d57a949ceef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.096927] env[62619]: DEBUG oslo_vmware.api [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for the task: (returnval){ [ 1561.096927] env[62619]: value = "task-1777715" [ 1561.096927] env[62619]: _type = "Task" [ 1561.096927] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.107623] env[62619]: DEBUG oslo_vmware.api [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777715, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.134317] env[62619]: DEBUG nova.network.neutron [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Updated VIF entry in instance network info cache for port 4603d400-5a47-4629-9915-a5315eeca473. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1561.134643] env[62619]: DEBUG nova.network.neutron [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Updating instance_info_cache with network_info: [{"id": "7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575", "address": "fa:16:3e:fd:35:d7", "network": {"id": "65fab282-c9d0-4538-b853-bc118e26fddd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2136510104", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e9d4c42-dc", "ovs_interfaceid": "7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4603d400-5a47-4629-9915-a5315eeca473", "address": "fa:16:3e:f8:73:07", "network": {"id": "bc8f0b8b-aaf4-4e38-ad5a-c1e19b3f4a04", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-796388189", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.44", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4603d400-5a", "ovs_interfaceid": "4603d400-5a47-4629-9915-a5315eeca473", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.150680] env[62619]: DEBUG nova.compute.manager [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1561.150935] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1561.152038] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6b7994-f8ae-4e68-bad8-7a944d184411 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.167534] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1561.168155] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-12aca8ba-dc29-4122-a50e-e4ee157fd8fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.178161] env[62619]: DEBUG oslo_vmware.api [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1561.178161] env[62619]: value = "task-1777716" [ 1561.178161] env[62619]: _type = "Task" [ 1561.178161] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.187665] env[62619]: DEBUG oslo_vmware.api [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777716, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.265935] env[62619]: DEBUG nova.network.neutron [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1561.269143] env[62619]: DEBUG nova.compute.manager [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1561.298891] env[62619]: INFO nova.compute.manager [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Took 49.09 seconds to build instance. [ 1561.344176] env[62619]: DEBUG oslo_vmware.api [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777712, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180634} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.345043] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1561.345309] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1561.345378] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1561.345518] env[62619]: INFO nova.compute.manager [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1561.345764] env[62619]: DEBUG oslo.service.loopingcall [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1561.346362] env[62619]: DEBUG nova.compute.manager [-] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1561.346437] env[62619]: DEBUG nova.network.neutron [-] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1561.356829] env[62619]: DEBUG oslo_vmware.api [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Task: {'id': task-1777711, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179832} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.358063] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1561.358063] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1561.358063] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1561.358063] env[62619]: INFO nova.compute.manager [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1561.358481] env[62619]: DEBUG oslo.service.loopingcall [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1561.361352] env[62619]: DEBUG nova.compute.manager [-] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1561.361709] env[62619]: DEBUG nova.network.neutron [-] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1561.365519] env[62619]: DEBUG nova.compute.manager [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1561.479288] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3354b5d7-9041-4ccb-85b9-a8a00451aa61 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.487990] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777713, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.491412] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ad2c3a-3ec8-48f5-8512-78edc98e51e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.531924] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee2f56f-7ca6-4958-90f7-24d34ff63b5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.543422] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f783039-86fa-489b-92f9-d7c9353538ce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.561676] env[62619]: DEBUG nova.compute.provider_tree [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.609729] env[62619]: DEBUG oslo_vmware.api [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Task: {'id': task-1777715, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18027} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.610269] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1561.610521] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1561.610708] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1561.610881] env[62619]: INFO nova.compute.manager [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1561.611220] env[62619]: DEBUG oslo.service.loopingcall [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1561.611345] env[62619]: DEBUG nova.compute.manager [-] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1561.612084] env[62619]: DEBUG nova.network.neutron [-] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1561.639050] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Releasing lock "refresh_cache-1f86b805-0fde-4bda-9a94-d440a670e23c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.639050] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Received event network-vif-plugged-35590c26-21c7-47f0-9a37-848ba413367f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1561.639050] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Acquiring lock "fab3d689-9e30-4afd-b0cc-49c6d2870c50-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.639050] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Lock "fab3d689-9e30-4afd-b0cc-49c6d2870c50-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.639384] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Lock "fab3d689-9e30-4afd-b0cc-49c6d2870c50-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.639384] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] No waiting events found dispatching network-vif-plugged-35590c26-21c7-47f0-9a37-848ba413367f {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1561.639484] env[62619]: WARNING nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Received unexpected event network-vif-plugged-35590c26-21c7-47f0-9a37-848ba413367f for instance with vm_state building and task_state spawning. 
[ 1561.639611] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Received event network-changed-35590c26-21c7-47f0-9a37-848ba413367f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1561.639803] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Refreshing instance network info cache due to event network-changed-35590c26-21c7-47f0-9a37-848ba413367f. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1561.640044] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Acquiring lock "refresh_cache-fab3d689-9e30-4afd-b0cc-49c6d2870c50" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.640193] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Acquired lock "refresh_cache-fab3d689-9e30-4afd-b0cc-49c6d2870c50" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.640455] env[62619]: DEBUG nova.network.neutron [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Refreshing network info cache for port 35590c26-21c7-47f0-9a37-848ba413367f {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1561.663172] env[62619]: DEBUG nova.network.neutron [-] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1561.686088] env[62619]: DEBUG nova.network.neutron [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Updating instance_info_cache with network_info: [{"id": "77589fc3-af02-4235-859e-fbf8a8322155", "address": "fa:16:3e:49:48:47", "network": {"id": "655928a1-c906-43b7-b355-a21b4db5f18b", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-689245540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3be1c72bfb5c4cfd9c45ab817384e11b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77589fc3-af", "ovs_interfaceid": "77589fc3-af02-4235-859e-fbf8a8322155", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.694333] env[62619]: DEBUG oslo_vmware.api [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777716, 'name': PowerOffVM_Task, 'duration_secs': 0.236107} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.694710] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1561.694820] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1561.695124] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d81630aa-5284-4a6f-8fa6-0f313d54afa6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.728154] env[62619]: DEBUG nova.compute.manager [req-ae68a08b-2d5e-4dd9-9fe2-ce1c73946da5 req-1f1c7257-c96c-4c09-8d5e-ca8388016158 service nova] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Received event network-changed-52679af0-12c1-41ec-927a-590d8d45fce0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1561.728739] env[62619]: DEBUG nova.compute.manager [req-ae68a08b-2d5e-4dd9-9fe2-ce1c73946da5 req-1f1c7257-c96c-4c09-8d5e-ca8388016158 service nova] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Refreshing instance network info cache due to event network-changed-52679af0-12c1-41ec-927a-590d8d45fce0. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1561.728739] env[62619]: DEBUG oslo_concurrency.lockutils [req-ae68a08b-2d5e-4dd9-9fe2-ce1c73946da5 req-1f1c7257-c96c-4c09-8d5e-ca8388016158 service nova] Acquiring lock "refresh_cache-ae37cae9-c82e-4775-8a8f-6bbf9108b0bd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.729780] env[62619]: DEBUG oslo_concurrency.lockutils [req-ae68a08b-2d5e-4dd9-9fe2-ce1c73946da5 req-1f1c7257-c96c-4c09-8d5e-ca8388016158 service nova] Acquired lock "refresh_cache-ae37cae9-c82e-4775-8a8f-6bbf9108b0bd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.729780] env[62619]: DEBUG nova.network.neutron [req-ae68a08b-2d5e-4dd9-9fe2-ce1c73946da5 req-1f1c7257-c96c-4c09-8d5e-ca8388016158 service nova] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Refreshing network info cache for port 52679af0-12c1-41ec-927a-590d8d45fce0 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1561.735474] env[62619]: DEBUG nova.network.neutron [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Successfully created port: 61e78327-dbb0-497b-be4b-14a0a1d8a5de {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1561.797952] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.800980] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a89c13a0-7d18-4723-8f0f-576e45a84cd9 tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "1f86b805-0fde-4bda-9a94-d440a670e23c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.193s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.880932] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1561.881281] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1561.881495] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleting the datastore file [datastore1] 0a80942c-eb86-480b-ab7b-33112dd90d28 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1561.882494] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61ca0b58-78fe-4b50-a532-a8085e7c22c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.890052] env[62619]: DEBUG oslo_vmware.api [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1561.890052] env[62619]: value = "task-1777718" [ 1561.890052] env[62619]: _type = "Task" [ 1561.890052] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.902128] env[62619]: DEBUG oslo_vmware.api [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777718, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.940261] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4f54452-3d95-45c7-91c0-98e45d5040c1 tempest-VolumesAssistedSnapshotsTest-45570677 tempest-VolumesAssistedSnapshotsTest-45570677-project-admin] Lock "ed34ae20-a891-45aa-8124-f36f264937f8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.395s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.984584] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777713, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.80102} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.984980] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ae37cae9-c82e-4775-8a8f-6bbf9108b0bd/ae37cae9-c82e-4775-8a8f-6bbf9108b0bd.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1561.985274] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1561.985642] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-005cd49b-364a-4884-8820-3e05ff71ea9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.992190] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1561.992190] env[62619]: value = "task-1777719" [ 1561.992190] env[62619]: _type = "Task" [ 1561.992190] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.000307] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777719, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.067020] env[62619]: DEBUG nova.scheduler.client.report [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1562.165960] env[62619]: DEBUG nova.network.neutron [-] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.189646] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Releasing lock "refresh_cache-b1c3c213-599d-4cab-8224-d87467d774c9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.190045] env[62619]: DEBUG nova.compute.manager [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Instance network_info: |[{"id": "77589fc3-af02-4235-859e-fbf8a8322155", "address": "fa:16:3e:49:48:47", "network": {"id": "655928a1-c906-43b7-b355-a21b4db5f18b", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-689245540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3be1c72bfb5c4cfd9c45ab817384e11b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77589fc3-af", "ovs_interfaceid": "77589fc3-af02-4235-859e-fbf8a8322155", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1562.191155] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:48:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '450939f7-f74b-41f7-93f7-b4fde6a6fbed', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'77589fc3-af02-4235-859e-fbf8a8322155', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1562.199215] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Creating folder: Project (3be1c72bfb5c4cfd9c45ab817384e11b). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1562.199771] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab7953f4-bf8e-48ae-82f2-bfb56a558be6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.213296] env[62619]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1562.213922] env[62619]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62619) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1562.213996] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Folder already exists: Project (3be1c72bfb5c4cfd9c45ab817384e11b). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1562.214181] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Creating folder: Instances. Parent ref: group-v369000. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1562.214429] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-182547e9-75d2-4fc7-92c3-1746302f7bcf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.226822] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Created folder: Instances in parent group-v369000. [ 1562.227081] env[62619]: DEBUG oslo.service.loopingcall [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1562.227274] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1562.227499] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ce098ed-3074-4c00-8fa4-c98e5bc0ef52 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.253252] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1562.253252] env[62619]: value = "task-1777722" [ 1562.253252] env[62619]: _type = "Task" [ 1562.253252] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.262396] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777722, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.306820] env[62619]: DEBUG nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1562.377928] env[62619]: DEBUG nova.compute.manager [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1562.411400] env[62619]: DEBUG oslo_vmware.api [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777718, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.414442] env[62619]: DEBUG nova.virt.hardware [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1562.414660] env[62619]: DEBUG nova.virt.hardware [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1562.414818] env[62619]: DEBUG nova.virt.hardware [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1562.414987] env[62619]: DEBUG nova.virt.hardware [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1562.415135] env[62619]: DEBUG nova.virt.hardware [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a 
tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1562.415316] env[62619]: DEBUG nova.virt.hardware [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1562.415573] env[62619]: DEBUG nova.virt.hardware [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1562.415644] env[62619]: DEBUG nova.virt.hardware [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1562.415798] env[62619]: DEBUG nova.virt.hardware [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1562.415951] env[62619]: DEBUG nova.virt.hardware [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1562.416698] env[62619]: DEBUG nova.virt.hardware [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1562.418189] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5874a7-210a-4b1e-abf2-891d6287d713 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.431976] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffcc144-902a-4c5a-9558-0b60f4537414 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.459276] env[62619]: DEBUG nova.network.neutron [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Updated VIF entry in instance network info cache for port 35590c26-21c7-47f0-9a37-848ba413367f. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1562.459401] env[62619]: DEBUG nova.network.neutron [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Updating instance_info_cache with network_info: [{"id": "35590c26-21c7-47f0-9a37-848ba413367f", "address": "fa:16:3e:62:0f:c7", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35590c26-21", "ovs_interfaceid": "35590c26-21c7-47f0-9a37-848ba413367f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.502636] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777719, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068427} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.502636] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1562.502910] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fb62a5-6aff-4296-ac71-b91401ea215f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.527482] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] ae37cae9-c82e-4775-8a8f-6bbf9108b0bd/ae37cae9-c82e-4775-8a8f-6bbf9108b0bd.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1562.530465] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7089dbd-ffed-4a28-a265-bdb73b628fc4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.546561] env[62619]: DEBUG nova.network.neutron [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Successfully updated port: a10c5399-b021-4ea7-8a41-4d58136aff12 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1562.547944] env[62619]: DEBUG nova.network.neutron [-] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.556238] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1562.556238] env[62619]: value = "task-1777724" [ 1562.556238] env[62619]: _type = "Task" [ 1562.556238] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.562425] env[62619]: DEBUG nova.network.neutron [-] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.568803] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777724, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.571662] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.219s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.574753] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 20.358s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.574821] env[62619]: DEBUG nova.objects.instance [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1562.608978] env[62619]: INFO nova.scheduler.client.report [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Deleted allocations for instance 39adf15c-f77e-4737-aeeb-258887007b9a [ 1562.641784] env[62619]: DEBUG nova.network.neutron [req-ae68a08b-2d5e-4dd9-9fe2-ce1c73946da5 req-1f1c7257-c96c-4c09-8d5e-ca8388016158 service nova] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Updated VIF entry in instance network info cache for port 52679af0-12c1-41ec-927a-590d8d45fce0. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1562.641784] env[62619]: DEBUG nova.network.neutron [req-ae68a08b-2d5e-4dd9-9fe2-ce1c73946da5 req-1f1c7257-c96c-4c09-8d5e-ca8388016158 service nova] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Updating instance_info_cache with network_info: [{"id": "52679af0-12c1-41ec-927a-590d8d45fce0", "address": "fa:16:3e:35:73:8e", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52679af0-12", "ovs_interfaceid": "52679af0-12c1-41ec-927a-590d8d45fce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.668900] env[62619]: INFO nova.compute.manager [-] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Took 1.06 seconds to deallocate network for instance. [ 1562.767027] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777722, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.828670] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.886511] env[62619]: DEBUG nova.compute.manager [None req-9e879ad6-f720-46a0-8d93-d8b89d2817c4 tempest-ServerExternalEventsTest-986062512 tempest-ServerExternalEventsTest-986062512-project] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Received event network-changed {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1562.886605] env[62619]: DEBUG nova.compute.manager [None req-9e879ad6-f720-46a0-8d93-d8b89d2817c4 tempest-ServerExternalEventsTest-986062512 tempest-ServerExternalEventsTest-986062512-project] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Refreshing instance network info cache due to event network-changed. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1562.886822] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9e879ad6-f720-46a0-8d93-d8b89d2817c4 tempest-ServerExternalEventsTest-986062512 tempest-ServerExternalEventsTest-986062512-project] Acquiring lock "refresh_cache-fab3d689-9e30-4afd-b0cc-49c6d2870c50" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1562.906804] env[62619]: DEBUG oslo_vmware.api [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777718, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.582179} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.907645] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1562.907980] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1562.908183] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1562.908371] env[62619]: INFO nova.compute.manager [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Took 1.76 seconds to destroy the instance on the hypervisor. [ 1562.908687] env[62619]: DEBUG oslo.service.loopingcall [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1562.909323] env[62619]: DEBUG nova.compute.manager [-] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1562.909412] env[62619]: DEBUG nova.network.neutron [-] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1562.966034] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Releasing lock "refresh_cache-fab3d689-9e30-4afd-b0cc-49c6d2870c50" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.966034] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Received event network-changed-f3bed801-3e28-49e7-83d3-60dcdf9a38ea {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1562.966034] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Refreshing instance network info cache due to event network-changed-f3bed801-3e28-49e7-83d3-60dcdf9a38ea. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1562.966034] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Acquiring lock "refresh_cache-0272ca2a-e9ff-4af5-8120-278a82d74627" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1562.966034] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Acquired lock "refresh_cache-0272ca2a-e9ff-4af5-8120-278a82d74627" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.966034] env[62619]: DEBUG nova.network.neutron [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Refreshing network info cache for port f3bed801-3e28-49e7-83d3-60dcdf9a38ea {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1562.967078] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9e879ad6-f720-46a0-8d93-d8b89d2817c4 tempest-ServerExternalEventsTest-986062512 tempest-ServerExternalEventsTest-986062512-project] Acquired lock "refresh_cache-fab3d689-9e30-4afd-b0cc-49c6d2870c50" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.967815] env[62619]: DEBUG nova.network.neutron [None req-9e879ad6-f720-46a0-8d93-d8b89d2817c4 tempest-ServerExternalEventsTest-986062512 tempest-ServerExternalEventsTest-986062512-project] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1563.049734] env[62619]: INFO nova.compute.manager [-] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Took 1.69 seconds to deallocate network for instance. 
[ 1563.050112] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1563.050260] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquired lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1563.050395] env[62619]: DEBUG nova.network.neutron [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1563.065975] env[62619]: INFO nova.compute.manager [-] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Took 1.72 seconds to deallocate network for instance. [ 1563.086080] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777724, 'name': ReconfigVM_Task, 'duration_secs': 0.44148} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.086671] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Reconfigured VM instance instance-00000038 to attach disk [datastore1] ae37cae9-c82e-4775-8a8f-6bbf9108b0bd/ae37cae9-c82e-4775-8a8f-6bbf9108b0bd.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1563.088086] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-816c0cdb-d522-4525-84af-791c8d2cbde2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.096516] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1563.096516] env[62619]: value = "task-1777725" [ 1563.096516] env[62619]: _type = "Task" [ 1563.096516] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.108702] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777725, 'name': Rename_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.120553] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9615273d-c86e-4490-bf79-bb52e95b2834 tempest-ServerGroupTestJSON-1174309374 tempest-ServerGroupTestJSON-1174309374-project-member] Lock "39adf15c-f77e-4737-aeeb-258887007b9a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.939s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.144281] env[62619]: DEBUG oslo_concurrency.lockutils [req-ae68a08b-2d5e-4dd9-9fe2-ce1c73946da5 req-1f1c7257-c96c-4c09-8d5e-ca8388016158 service nova] Releasing lock "refresh_cache-ae37cae9-c82e-4775-8a8f-6bbf9108b0bd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.144614] env[62619]: DEBUG nova.compute.manager [req-ae68a08b-2d5e-4dd9-9fe2-ce1c73946da5 req-1f1c7257-c96c-4c09-8d5e-ca8388016158 service nova] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Received event network-vif-deleted-9e3b6fd0-aa70-4b70-b39f-e4ff9f3aa777 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1563.144815] env[62619]: DEBUG nova.compute.manager [req-ae68a08b-2d5e-4dd9-9fe2-ce1c73946da5 req-1f1c7257-c96c-4c09-8d5e-ca8388016158 service nova] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Received event network-vif-plugged-77589fc3-af02-4235-859e-fbf8a8322155 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1563.145044] env[62619]: DEBUG oslo_concurrency.lockutils [req-ae68a08b-2d5e-4dd9-9fe2-ce1c73946da5 req-1f1c7257-c96c-4c09-8d5e-ca8388016158 service nova] Acquiring lock "b1c3c213-599d-4cab-8224-d87467d774c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.145304] env[62619]: DEBUG oslo_concurrency.lockutils [req-ae68a08b-2d5e-4dd9-9fe2-ce1c73946da5 req-1f1c7257-c96c-4c09-8d5e-ca8388016158 service nova] Lock "b1c3c213-599d-4cab-8224-d87467d774c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.145481] env[62619]: DEBUG oslo_concurrency.lockutils [req-ae68a08b-2d5e-4dd9-9fe2-ce1c73946da5 req-1f1c7257-c96c-4c09-8d5e-ca8388016158 service nova] Lock "b1c3c213-599d-4cab-8224-d87467d774c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.145693] env[62619]: DEBUG nova.compute.manager [req-ae68a08b-2d5e-4dd9-9fe2-ce1c73946da5 req-1f1c7257-c96c-4c09-8d5e-ca8388016158 service nova] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] No waiting events found dispatching network-vif-plugged-77589fc3-af02-4235-859e-fbf8a8322155 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1563.145871] env[62619]: WARNING nova.compute.manager [req-ae68a08b-2d5e-4dd9-9fe2-ce1c73946da5 req-1f1c7257-c96c-4c09-8d5e-ca8388016158 service nova] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Received unexpected event network-vif-plugged-77589fc3-af02-4235-859e-fbf8a8322155 for instance with vm_state building and task_state spawning. 
[ 1563.176852] env[62619]: DEBUG oslo_concurrency.lockutils [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.215327] env[62619]: DEBUG nova.compute.manager [req-5615b76e-9c20-40ac-b6ef-fe73cad1a5b3 req-5906c43a-c88d-4a01-88bd-9bec8245444d service nova] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Received event network-vif-deleted-95751072-9868-4775-a7ca-205205689a74 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1563.266791] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777722, 'name': CreateVM_Task, 'duration_secs': 0.677888} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.266970] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1563.267703] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'boot_index': 0, 'guest_format': None, 'mount_device': '/dev/sda', 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369005', 'volume_id': 'da513efd-d6e1-4db5-90d2-52a5be9aa233', 'name': 'volume-da513efd-d6e1-4db5-90d2-52a5be9aa233', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b1c3c213-599d-4cab-8224-d87467d774c9', 'attached_at': '', 'detached_at': '', 'volume_id': 'da513efd-d6e1-4db5-90d2-52a5be9aa233', 'serial': 'da513efd-d6e1-4db5-90d2-52a5be9aa233'}, 'attachment_id': '0dcb6059-b07c-4a90-a6f4-c7f368ded7de', 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=62619) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1563.267907] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Root volume attach. 
Driver type: vmdk {{(pid=62619) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1563.268707] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c3f03d-cf7c-4621-be68-43043dabaf09 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.277977] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92e7a6e-eadc-4eed-9af6-adc060ff76f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.284819] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f0f5a5-42ba-4936-817e-b5771ed18fd0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.291611] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-1eb653a0-2651-4558-aa75-f092fc40ae6b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.298746] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Waiting for the task: (returnval){ [ 1563.298746] env[62619]: value = "task-1777726" [ 1563.298746] env[62619]: _type = "Task" [ 1563.298746] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.311739] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1777726, 'name': RelocateVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.391865] env[62619]: DEBUG nova.compute.manager [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1563.392871] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0deace-f3d1-4fe2-86a4-3aef9005d90b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.491464] env[62619]: DEBUG nova.network.neutron [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1563.564131] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.583506] env[62619]: DEBUG oslo_concurrency.lockutils [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.589728] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8026d01a-74d6-4a71-ba81-8ae3f32f7f8e tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.589728] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 20.110s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.609843] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777725, 'name': Rename_Task, 'duration_secs': 0.155426} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.610650] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1563.611163] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85fe2874-52ce-4b8d-9671-9485b2717895 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.613666] env[62619]: DEBUG nova.network.neutron [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1563.621979] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1563.621979] env[62619]: value = "task-1777727" [ 1563.621979] env[62619]: _type = "Task" [ 1563.621979] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.631181] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777727, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.640793] env[62619]: DEBUG nova.network.neutron [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1563.718747] env[62619]: DEBUG nova.network.neutron [-] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1563.812671] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1777726, 'name': RelocateVM_Task, 'duration_secs': 0.026203} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.813635] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Volume attach. Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1563.813913] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369005', 'volume_id': 'da513efd-d6e1-4db5-90d2-52a5be9aa233', 'name': 'volume-da513efd-d6e1-4db5-90d2-52a5be9aa233', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b1c3c213-599d-4cab-8224-d87467d774c9', 'attached_at': '', 'detached_at': '', 'volume_id': 'da513efd-d6e1-4db5-90d2-52a5be9aa233', 'serial': 'da513efd-d6e1-4db5-90d2-52a5be9aa233'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1563.814831] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ce15ff-71e4-4133-b0c8-9eeb1cc206d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.834168] env[62619]: DEBUG nova.network.neutron [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Updating instance_info_cache with network_info: [{"id": "a10c5399-b021-4ea7-8a41-4d58136aff12", "address": "fa:16:3e:14:4c:3d", "network": {"id": "7e652693-afc0-4c29-9a04-cad63acf109b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-107922130-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf557954d79d4bb1939f6e65d4ed00b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa10c5399-b0", "ovs_interfaceid": "a10c5399-b021-4ea7-8a41-4d58136aff12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1563.836598] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639a3360-9675-4f1a-a246-f3c5671f2f87 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.862409] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] volume-da513efd-d6e1-4db5-90d2-52a5be9aa233/volume-da513efd-d6e1-4db5-90d2-52a5be9aa233.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1563.866522] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2797bfc-5d86-4f6c-863e-05a0089f7052 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.890185] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Waiting for the task: (returnval){ [ 1563.890185] env[62619]: value = "task-1777728" [ 1563.890185] env[62619]: _type = "Task" [ 1563.890185] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.902193] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1777728, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.905871] env[62619]: INFO nova.compute.manager [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] instance snapshotting [ 1563.906111] env[62619]: DEBUG nova.objects.instance [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'flavor' on Instance uuid da806d3f-79f0-4188-a2d8-0beeb9dfec1a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1563.932077] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "1f86b805-0fde-4bda-9a94-d440a670e23c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.932269] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "1f86b805-0fde-4bda-9a94-d440a670e23c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.932485] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "1f86b805-0fde-4bda-9a94-d440a670e23c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.932663] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "1f86b805-0fde-4bda-9a94-d440a670e23c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.932845] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "1f86b805-0fde-4bda-9a94-d440a670e23c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.935301] env[62619]: INFO nova.compute.manager [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Terminating instance [ 1564.009363] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Acquiring lock "fab3d689-9e30-4afd-b0cc-49c6d2870c50" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.009889] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Lock "fab3d689-9e30-4afd-b0cc-49c6d2870c50" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.010145] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Acquiring lock "fab3d689-9e30-4afd-b0cc-49c6d2870c50-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.010355] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Lock "fab3d689-9e30-4afd-b0cc-49c6d2870c50-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.010525] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Lock "fab3d689-9e30-4afd-b0cc-49c6d2870c50-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1564.013226] env[62619]: INFO nova.compute.manager [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Terminating instance [ 1564.096901] env[62619]: INFO nova.compute.claims [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1564.132985] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777727, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.145316] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Releasing lock "refresh_cache-0272ca2a-e9ff-4af5-8120-278a82d74627" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1564.145316] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Received event network-vif-plugged-52679af0-12c1-41ec-927a-590d8d45fce0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1564.145316] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Acquiring lock "ae37cae9-c82e-4775-8a8f-6bbf9108b0bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.145316] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Lock "ae37cae9-c82e-4775-8a8f-6bbf9108b0bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.145316] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] Lock "ae37cae9-c82e-4775-8a8f-6bbf9108b0bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1564.145316] env[62619]: DEBUG nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] No waiting events found dispatching network-vif-plugged-52679af0-12c1-41ec-927a-590d8d45fce0 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1564.145316] env[62619]: WARNING nova.compute.manager [req-2e7aa637-54dd-4f6a-b42c-1fbebee9e9fa req-bbc8fcb0-2566-4892-b1f4-db1db1ae3d4c service nova] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Received unexpected event network-vif-plugged-52679af0-12c1-41ec-927a-590d8d45fce0 for instance with vm_state building and task_state spawning. 
[ 1564.211086] env[62619]: DEBUG nova.network.neutron [None req-9e879ad6-f720-46a0-8d93-d8b89d2817c4 tempest-ServerExternalEventsTest-986062512 tempest-ServerExternalEventsTest-986062512-project] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Updating instance_info_cache with network_info: [{"id": "35590c26-21c7-47f0-9a37-848ba413367f", "address": "fa:16:3e:62:0f:c7", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35590c26-21", "ovs_interfaceid": "35590c26-21c7-47f0-9a37-848ba413367f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1564.224380] env[62619]: INFO nova.compute.manager [-] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Took 1.31 seconds to deallocate network for instance. [ 1564.339227] env[62619]: DEBUG nova.network.neutron [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Successfully updated port: 61e78327-dbb0-497b-be4b-14a0a1d8a5de {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1564.340405] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Releasing lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1564.341666] env[62619]: DEBUG nova.compute.manager [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Instance network_info: |[{"id": "a10c5399-b021-4ea7-8a41-4d58136aff12", "address": "fa:16:3e:14:4c:3d", "network": {"id": "7e652693-afc0-4c29-9a04-cad63acf109b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-107922130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf557954d79d4bb1939f6e65d4ed00b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa10c5399-b0", "ovs_interfaceid": "a10c5399-b021-4ea7-8a41-4d58136aff12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1564.343856] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:4c:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a10c88d7-d13f-44fd-acee-7a734eb5f56a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a10c5399-b021-4ea7-8a41-4d58136aff12', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1564.352749] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Creating folder: Project (bf557954d79d4bb1939f6e65d4ed00b5). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1564.356016] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f11c7461-82e7-409b-af05-349d18c8400e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.367189] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Created folder: Project (bf557954d79d4bb1939f6e65d4ed00b5) in parent group-v368875. [ 1564.367189] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Creating folder: Instances. Parent ref: group-v369040. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1564.367189] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-465b7243-4d20-4e9c-a717-81fda8b42283 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.378224] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Created folder: Instances in parent group-v369040. [ 1564.378224] env[62619]: DEBUG oslo.service.loopingcall [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1564.378224] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1564.378224] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-798352f8-bee4-49a9-b165-ca38f320207a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.406805] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1777728, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.410189] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1564.410189] env[62619]: value = "task-1777731" [ 1564.410189] env[62619]: _type = "Task" [ 1564.410189] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.411836] env[62619]: DEBUG nova.compute.manager [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Received event network-changed-77589fc3-af02-4235-859e-fbf8a8322155 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1564.412066] env[62619]: DEBUG nova.compute.manager [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Refreshing instance network info cache due to event network-changed-77589fc3-af02-4235-859e-fbf8a8322155. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1564.412309] env[62619]: DEBUG oslo_concurrency.lockutils [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] Acquiring lock "refresh_cache-b1c3c213-599d-4cab-8224-d87467d774c9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1564.412476] env[62619]: DEBUG oslo_concurrency.lockutils [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] Acquired lock "refresh_cache-b1c3c213-599d-4cab-8224-d87467d774c9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1564.412673] env[62619]: DEBUG nova.network.neutron [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Refreshing network info cache for port 77589fc3-af02-4235-859e-fbf8a8322155 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1564.421998] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a82271-2ce4-4782-93dc-5a0fda8d5c8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.446716] env[62619]: DEBUG nova.compute.manager [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1564.446968] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1564.451774] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1f55cf-e007-4fcb-a0d3-87dd2eeddeb0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.455756] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b44c8b-19e7-43ce-aa10-74028d417d8f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.459228] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777731, 'name': CreateVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.469341] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1564.470701] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-508057d6-8558-4ab6-a0a8-b29c843b03ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.477228] env[62619]: DEBUG oslo_vmware.api [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1564.477228] env[62619]: value = "task-1777732" [ 1564.477228] env[62619]: _type = "Task" [ 1564.477228] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.487644] env[62619]: DEBUG oslo_vmware.api [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777732, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.519501] env[62619]: DEBUG nova.compute.manager [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1564.519501] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1564.519800] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db73e339-beec-41bc-8647-f08e860cc2ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.530154] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1564.530154] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0c4fbd3-b239-49d8-8ace-cb1dc1b54815 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.536317] env[62619]: DEBUG oslo_vmware.api [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Waiting for the task: (returnval){ [ 1564.536317] env[62619]: value = "task-1777733" [ 1564.536317] env[62619]: _type = "Task" [ 1564.536317] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.549427] env[62619]: DEBUG oslo_vmware.api [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777733, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.604132] env[62619]: INFO nova.compute.resource_tracker [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating resource usage from migration 54adc27f-a3e2-473a-ac9e-41a3f933f141 [ 1564.634731] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777727, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.714742] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9e879ad6-f720-46a0-8d93-d8b89d2817c4 tempest-ServerExternalEventsTest-986062512 tempest-ServerExternalEventsTest-986062512-project] Releasing lock "refresh_cache-fab3d689-9e30-4afd-b0cc-49c6d2870c50" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1564.731173] env[62619]: DEBUG oslo_concurrency.lockutils [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.841483] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "refresh_cache-5b1008fb-7c0a-4e12-90f8-119a82ea62f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1564.841636] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "refresh_cache-5b1008fb-7c0a-4e12-90f8-119a82ea62f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1564.841859] env[62619]: DEBUG nova.network.neutron [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1564.907112] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1777728, 'name': ReconfigVM_Task, 'duration_secs': 0.722517} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.909701] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Reconfigured VM instance instance-00000039 to attach disk [datastore1] volume-da513efd-d6e1-4db5-90d2-52a5be9aa233/volume-da513efd-d6e1-4db5-90d2-52a5be9aa233.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1564.914726] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d04343e7-abc6-47f2-9485-e0f10dab3ab5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.938858] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777731, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.940383] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Waiting for the task: (returnval){ [ 1564.940383] env[62619]: value = "task-1777734" [ 1564.940383] env[62619]: _type = "Task" [ 1564.940383] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.955700] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1777734, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.971633] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1564.971971] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2493be70-49b8-4dc8-bd53-f040a7378c36 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.983200] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1564.983200] env[62619]: value = "task-1777735" [ 1564.983200] env[62619]: _type = "Task" [ 1564.983200] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.992381] env[62619]: DEBUG oslo_vmware.api [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777732, 'name': PowerOffVM_Task, 'duration_secs': 0.283228} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.995501] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1564.995729] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1564.996310] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc2e906a-6553-4fac-a2c6-0b23cfceed70 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.000842] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777735, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.047898] env[62619]: DEBUG oslo_vmware.api [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777733, 'name': PowerOffVM_Task, 'duration_secs': 0.239183} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.047898] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1565.047898] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1565.049416] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34b69106-8979-48bb-aaf5-febdd06a08ed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.130241] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e94ce82-90e1-4444-b173-a919be9a3860 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.136417] env[62619]: DEBUG oslo_vmware.api [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777727, 'name': PowerOnVM_Task, 'duration_secs': 1.261474} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.137084] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1565.137303] env[62619]: INFO nova.compute.manager [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Took 10.43 seconds to spawn the instance on the hypervisor. [ 1565.137498] env[62619]: DEBUG nova.compute.manager [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1565.138466] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06ea518-d57b-44ec-b0f3-0d4c798e28aa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.146346] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5628799c-b156-4940-9088-9151993faa58 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.186215] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d487cae-009d-48d9-a2b6-ca74c8a41fcf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.188881] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1565.189080] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1565.189273] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Deleting the datastore file [datastore1] 1f86b805-0fde-4bda-9a94-d440a670e23c {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1565.189486] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-917d4a43-d55d-49fa-b579-5c1a0ab1e1d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.209059] env[62619]: DEBUG oslo_vmware.api [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for the task: (returnval){ [ 1565.209059] 
env[62619]: value = "task-1777738" [ 1565.209059] env[62619]: _type = "Task" [ 1565.209059] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.215533] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35dde3a1-4976-4e7b-bf60-9fac7bde446f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.224386] env[62619]: DEBUG oslo_vmware.api [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777738, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.232437] env[62619]: DEBUG nova.compute.provider_tree [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1565.256681] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "cd8b8828-79cf-4a7c-b018-b8bd745aaa45" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.256770] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "cd8b8828-79cf-4a7c-b018-b8bd745aaa45" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.256956] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "cd8b8828-79cf-4a7c-b018-b8bd745aaa45-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.257153] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "cd8b8828-79cf-4a7c-b018-b8bd745aaa45-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.257319] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de336900-918d-4c34-b175-73df671733b5 
tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "cd8b8828-79cf-4a7c-b018-b8bd745aaa45-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.259629] env[62619]: INFO nova.compute.manager [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Terminating instance [ 1565.370774] env[62619]: DEBUG nova.network.neutron [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Updated VIF entry in instance network info cache for port 77589fc3-af02-4235-859e-fbf8a8322155. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1565.370938] env[62619]: DEBUG nova.network.neutron [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Updating instance_info_cache with network_info: [{"id": "77589fc3-af02-4235-859e-fbf8a8322155", "address": "fa:16:3e:49:48:47", "network": {"id": "655928a1-c906-43b7-b355-a21b4db5f18b", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-689245540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3be1c72bfb5c4cfd9c45ab817384e11b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77589fc3-af", "ovs_interfaceid": "77589fc3-af02-4235-859e-fbf8a8322155", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1565.375837] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Acquiring lock "ed34ae20-a891-45aa-8124-f36f264937f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.376296] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Lock "ed34ae20-a891-45aa-8124-f36f264937f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.376296] env[62619]: DEBUG oslo_concurrency.lockutils [None 
req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Acquiring lock "ed34ae20-a891-45aa-8124-f36f264937f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.376423] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Lock "ed34ae20-a891-45aa-8124-f36f264937f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.377509] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Lock "ed34ae20-a891-45aa-8124-f36f264937f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.378416] env[62619]: INFO nova.compute.manager [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Terminating instance [ 1565.384258] env[62619]: DEBUG nova.network.neutron [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1565.424085] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777731, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.450237] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1777734, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.495026] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777735, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.672878] env[62619]: INFO nova.compute.manager [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Took 39.96 seconds to build instance. 
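The wait_for_task / _poll_task entries above show nova driving asynchronous vSphere tasks through oslo.vmware: invoke a *_Task method, then poll the returned task object until it completes. A minimal illustrative sketch of that pattern follows; the vCenter host, credentials, and managed-object ID are hypothetical placeholders, not values taken from this log.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Hypothetical vCenter endpoint and credentials -- illustration only.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Hypothetical managed object reference for an existing VM.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Kick off an asynchronous vSphere task, then block while oslo.vmware
    # polls it (the wait_for_task / _poll_task lines seen in the log above).
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
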
[ 1565.722594] env[62619]: DEBUG oslo_vmware.api [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Task: {'id': task-1777738, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.310481} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.723098] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1565.723462] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1565.725108] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1565.725108] env[62619]: INFO nova.compute.manager [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1565.725108] env[62619]: DEBUG oslo.service.loopingcall [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1565.725487] env[62619]: DEBUG nova.compute.manager [-] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1565.725615] env[62619]: DEBUG nova.network.neutron [-] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1565.755803] env[62619]: ERROR nova.scheduler.client.report [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [req-9722b9e1-db2a-4584-9f52-949280128dc3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9722b9e1-db2a-4584-9f52-949280128dc3"}]} [ 1565.764901] env[62619]: DEBUG nova.compute.manager [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1565.765190] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1565.766516] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4df5684-b3de-4aee-8332-5823978af0a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.776856] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1565.776856] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b21638a0-88f3-4fd4-bed8-9a17f88cdbbc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.781746] env[62619]: DEBUG nova.scheduler.client.report [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1565.786139] env[62619]: DEBUG oslo_vmware.api [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1565.786139] env[62619]: value = "task-1777739" [ 1565.786139] env[62619]: _type = "Task" [ 1565.786139] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.796865] env[62619]: DEBUG oslo_vmware.api [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777739, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.803443] env[62619]: DEBUG nova.scheduler.client.report [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1565.803673] env[62619]: DEBUG nova.compute.provider_tree [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1565.821898] env[62619]: DEBUG nova.scheduler.client.report [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1565.864614] env[62619]: DEBUG nova.scheduler.client.report [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1565.873432] env[62619]: DEBUG oslo_concurrency.lockutils [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] Releasing lock "refresh_cache-b1c3c213-599d-4cab-8224-d87467d774c9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1565.873711] env[62619]: DEBUG nova.compute.manager [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Received event network-vif-plugged-a10c5399-b021-4ea7-8a41-4d58136aff12 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1565.873945] env[62619]: DEBUG oslo_concurrency.lockutils [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] Acquiring lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.874190] env[62619]: DEBUG oslo_concurrency.lockutils [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.874349] env[62619]: DEBUG oslo_concurrency.lockutils [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.874522] env[62619]: DEBUG nova.compute.manager [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] No waiting events found dispatching network-vif-plugged-a10c5399-b021-4ea7-8a41-4d58136aff12 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1565.874684] env[62619]: WARNING nova.compute.manager [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Received unexpected event network-vif-plugged-a10c5399-b021-4ea7-8a41-4d58136aff12 for instance with vm_state building and task_state spawning. [ 1565.874840] env[62619]: DEBUG nova.compute.manager [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Received event network-changed-a10c5399-b021-4ea7-8a41-4d58136aff12 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1565.874984] env[62619]: DEBUG nova.compute.manager [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Refreshing instance network info cache due to event network-changed-a10c5399-b021-4ea7-8a41-4d58136aff12. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1565.875194] env[62619]: DEBUG oslo_concurrency.lockutils [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] Acquiring lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1565.875340] env[62619]: DEBUG oslo_concurrency.lockutils [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] Acquired lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1565.875492] env[62619]: DEBUG nova.network.neutron [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Refreshing network info cache for port a10c5399-b021-4ea7-8a41-4d58136aff12 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1565.882139] env[62619]: DEBUG nova.compute.manager [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1565.882344] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1565.883838] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9625f89-f589-41b9-95a6-d0cff3b056d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.894390] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1565.896663] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb714c9f-9d61-4923-b5f8-9446e4060120 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.903217] env[62619]: DEBUG oslo_vmware.api [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Waiting for the task: (returnval){ [ 1565.903217] env[62619]: value = "task-1777740" [ 1565.903217] env[62619]: _type = "Task" [ 1565.903217] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.914962] env[62619]: DEBUG oslo_vmware.api [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': task-1777740, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.926765] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.927023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.935759] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777731, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.957366] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1565.957576] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1565.957754] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Deleting the datastore file [datastore1] fab3d689-9e30-4afd-b0cc-49c6d2870c50 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1565.960923] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-604c82b8-0321-470e-b90a-f57bb79e3ab9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.963688] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1777734, 'name': ReconfigVM_Task, 'duration_secs': 0.985449} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.964093] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369005', 'volume_id': 'da513efd-d6e1-4db5-90d2-52a5be9aa233', 'name': 'volume-da513efd-d6e1-4db5-90d2-52a5be9aa233', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b1c3c213-599d-4cab-8224-d87467d774c9', 'attached_at': '', 'detached_at': '', 'volume_id': 'da513efd-d6e1-4db5-90d2-52a5be9aa233', 'serial': 'da513efd-d6e1-4db5-90d2-52a5be9aa233'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1565.965477] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-714ee844-45e6-4f2d-8d58-7346220f02cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.969893] env[62619]: DEBUG oslo_vmware.api [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Waiting for the task: (returnval){ [ 1565.969893] env[62619]: value = "task-1777741" [ 1565.969893] env[62619]: _type = "Task" [ 1565.969893] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.974944] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Waiting for the task: (returnval){ [ 1565.974944] env[62619]: value = "task-1777742" [ 1565.974944] env[62619]: _type = "Task" [ 1565.974944] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.989057] env[62619]: DEBUG nova.compute.manager [req-f9eacf66-7fda-440a-9c6b-f259958eefd7 req-2811a669-6f69-4b41-8a34-2b620788e9bd service nova] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Received event network-vif-deleted-f3bed801-3e28-49e7-83d3-60dcdf9a38ea {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1565.989174] env[62619]: DEBUG nova.compute.manager [req-f9eacf66-7fda-440a-9c6b-f259958eefd7 req-2811a669-6f69-4b41-8a34-2b620788e9bd service nova] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Received event network-vif-deleted-535fa511-22ab-4762-80f3-e92464fddeb5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1565.989916] env[62619]: DEBUG oslo_vmware.api [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777741, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.990957] env[62619]: DEBUG nova.network.neutron [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Updating instance_info_cache with network_info: [{"id": "61e78327-dbb0-497b-be4b-14a0a1d8a5de", "address": "fa:16:3e:a9:78:54", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61e78327-db", "ovs_interfaceid": "61e78327-dbb0-497b-be4b-14a0a1d8a5de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1566.000786] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1777742, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.010886] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777735, 'name': CreateSnapshot_Task, 'duration_secs': 1.006343} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.011220] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1566.012915] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fef95ab-da4a-4c60-a46c-92e82fb584d7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.176594] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fbd6e4bf-95d0-4b74-b02a-61fd004ac425 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "ae37cae9-c82e-4775-8a8f-6bbf9108b0bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.177s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.299641] env[62619]: DEBUG oslo_vmware.api [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777739, 'name': PowerOffVM_Task, 'duration_secs': 0.243094} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.299962] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1566.300099] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1566.300475] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00b812f5-dfdd-4368-90d1-125467555cfb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.416449] env[62619]: DEBUG oslo_vmware.api [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': task-1777740, 'name': PowerOffVM_Task, 'duration_secs': 0.24945} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.416679] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1566.416844] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1566.417185] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9fb933a-446e-4bb2-9a33-94e39d0561fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.425388] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c989eeae-1146-4303-8ad2-a6087e089b12 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.432707] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777731, 'name': CreateVM_Task, 'duration_secs': 1.537838} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.436415] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1566.437225] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.437382] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.437687] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1566.439007] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beec51e4-f23d-4557-8c67-1436e134bea0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.441905] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d9f9c64-6b32-4828-9265-7cd91a4a77dc {{(pid=62619) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.447740] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1566.447740] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52be0042-e1a2-66d0-5f9d-a94352ec2a4e" [ 1566.447740] env[62619]: _type = "Task" [ 1566.447740] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.478033] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e32955-ba19-4179-b3e4-c75f28d8199c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.487919] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52be0042-e1a2-66d0-5f9d-a94352ec2a4e, 'name': SearchDatastore_Task, 'duration_secs': 0.014365} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.493774] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1566.494031] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1566.494265] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.494406] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.494577] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1566.494914] env[62619]: DEBUG oslo_vmware.api [None req-3db74f53-47f5-4907-8732-24e733594812 
tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Task: {'id': task-1777741, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145466} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.495177] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b66824e-62fd-493e-ab01-87398245e290 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.498249] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c45c60b3-67d5-4569-b07b-fb6333bc91bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.503886] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1566.504087] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1566.504257] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1566.504424] env[62619]: INFO nova.compute.manager [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Took 1.99 seconds to destroy the instance on the hypervisor. [ 1566.504649] env[62619]: DEBUG oslo.service.loopingcall [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1566.506648] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "refresh_cache-5b1008fb-7c0a-4e12-90f8-119a82ea62f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1566.506922] env[62619]: DEBUG nova.compute.manager [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Instance network_info: |[{"id": "61e78327-dbb0-497b-be4b-14a0a1d8a5de", "address": "fa:16:3e:a9:78:54", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61e78327-db", "ovs_interfaceid": "61e78327-dbb0-497b-be4b-14a0a1d8a5de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1566.507191] env[62619]: DEBUG nova.compute.manager [-] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1566.507292] env[62619]: DEBUG nova.network.neutron [-] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1566.509485] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:78:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8ee8640-3787-4c27-9581-962ddb2be7e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61e78327-dbb0-497b-be4b-14a0a1d8a5de', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1566.517077] env[62619]: DEBUG oslo.service.loopingcall [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1566.521714] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1566.522323] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1777742, 'name': Rename_Task, 'duration_secs': 0.170612} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.531940] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50419e14-1218-4d0f-8769-774ead94ca94 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.546446] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1566.546638] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1566.546818] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1566.546983] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleting the datastore file [datastore1] cd8b8828-79cf-4a7c-b018-b8bd745aaa45 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1566.553512] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1566.554041] env[62619]: DEBUG nova.compute.provider_tree [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1566.559084] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a048c602-8fdd-49b9-9a0f-d6250c03da7f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.560653] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d823497-06ce-4378-b756-036f783bca9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.562140] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0aef669d-29d8-4d52-923d-7f2a75d5b105 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.565398] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1566.565567] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1566.567586] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc971a23-f823-4909-911f-9323e9f5cad7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.576033] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1566.576033] env[62619]: value = "task-1777745" [ 1566.576033] env[62619]: _type = "Task" [ 1566.576033] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.577747] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Waiting for the task: (returnval){ [ 1566.577747] env[62619]: value = "task-1777746" [ 1566.577747] env[62619]: _type = "Task" [ 1566.577747] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.581230] env[62619]: DEBUG oslo_vmware.api [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1566.581230] env[62619]: value = "task-1777747" [ 1566.581230] env[62619]: _type = "Task" [ 1566.581230] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.581472] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1566.581472] env[62619]: value = "task-1777748" [ 1566.581472] env[62619]: _type = "Task" [ 1566.581472] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.581715] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1566.581715] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529b53a2-120b-131e-acc0-bc76d130c71a" [ 1566.581715] env[62619]: _type = "Task" [ 1566.581715] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.585985] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1566.587273] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1566.587273] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Deleting the datastore file [datastore1] ed34ae20-a891-45aa-8124-f36f264937f8 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1566.594498] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f6bb586f-5f13-4557-bc18-2ba2c52ba680 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.600090] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777745, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.608280] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1777746, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.618785] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529b53a2-120b-131e-acc0-bc76d130c71a, 'name': SearchDatastore_Task, 'duration_secs': 0.024679} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.619094] env[62619]: DEBUG oslo_vmware.api [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777747, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.619749] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777748, 'name': CloneVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.621826] env[62619]: DEBUG oslo_vmware.api [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Waiting for the task: (returnval){ [ 1566.621826] env[62619]: value = "task-1777749" [ 1566.621826] env[62619]: _type = "Task" [ 1566.621826] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.622023] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4d8b9bc-8c5f-49f6-8f40-17cbd476b701 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.633048] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1566.633048] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52532704-e96f-2008-a860-10cae30f4090" [ 1566.633048] env[62619]: _type = "Task" [ 1566.633048] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.640033] env[62619]: DEBUG oslo_vmware.api [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': task-1777749, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.645665] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52532704-e96f-2008-a860-10cae30f4090, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.683032] env[62619]: DEBUG nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1566.959930] env[62619]: DEBUG nova.network.neutron [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Updated VIF entry in instance network info cache for port a10c5399-b021-4ea7-8a41-4d58136aff12. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1566.960340] env[62619]: DEBUG nova.network.neutron [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Updating instance_info_cache with network_info: [{"id": "a10c5399-b021-4ea7-8a41-4d58136aff12", "address": "fa:16:3e:14:4c:3d", "network": {"id": "7e652693-afc0-4c29-9a04-cad63acf109b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-107922130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf557954d79d4bb1939f6e65d4ed00b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa10c5399-b0", "ovs_interfaceid": "a10c5399-b021-4ea7-8a41-4d58136aff12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.025861] env[62619]: DEBUG nova.compute.manager [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Received event network-vif-plugged-61e78327-dbb0-497b-be4b-14a0a1d8a5de {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1567.026619] env[62619]: DEBUG oslo_concurrency.lockutils [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] Acquiring lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.026841] env[62619]: DEBUG oslo_concurrency.lockutils [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] Lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.027021] env[62619]: DEBUG oslo_concurrency.lockutils [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] Lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.027196] env[62619]: DEBUG nova.compute.manager [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] No waiting events found dispatching network-vif-plugged-61e78327-dbb0-497b-be4b-14a0a1d8a5de {{(pid=62619) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1567.027362] env[62619]: WARNING nova.compute.manager [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Received unexpected event network-vif-plugged-61e78327-dbb0-497b-be4b-14a0a1d8a5de for instance with vm_state building and task_state spawning. [ 1567.027521] env[62619]: DEBUG nova.compute.manager [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Received event network-changed-61e78327-dbb0-497b-be4b-14a0a1d8a5de {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1567.027694] env[62619]: DEBUG nova.compute.manager [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Refreshing instance network info cache due to event network-changed-61e78327-dbb0-497b-be4b-14a0a1d8a5de. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1567.027871] env[62619]: DEBUG oslo_concurrency.lockutils [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] Acquiring lock "refresh_cache-5b1008fb-7c0a-4e12-90f8-119a82ea62f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1567.028014] env[62619]: DEBUG oslo_concurrency.lockutils [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] Acquired lock "refresh_cache-5b1008fb-7c0a-4e12-90f8-119a82ea62f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.028171] env[62619]: DEBUG nova.network.neutron [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Refreshing network info cache for port 61e78327-dbb0-497b-be4b-14a0a1d8a5de {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1567.087274] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777745, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.103671] env[62619]: DEBUG nova.scheduler.client.report [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 86 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1567.104134] env[62619]: DEBUG nova.compute.provider_tree [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 86 to 87 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1567.104482] env[62619]: DEBUG nova.compute.provider_tree [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1567.118197] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1777746, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.128765] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777748, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.129354] env[62619]: DEBUG oslo_vmware.api [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777747, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251081} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.136128] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1567.136526] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1567.136860] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1567.137212] env[62619]: INFO nova.compute.manager [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Took 1.37 seconds to destroy the instance on the hypervisor. [ 1567.137627] env[62619]: DEBUG oslo.service.loopingcall [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1567.139239] env[62619]: DEBUG nova.compute.manager [-] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1567.139239] env[62619]: DEBUG nova.network.neutron [-] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1567.152901] env[62619]: DEBUG oslo_vmware.api [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Task: {'id': task-1777749, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217748} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.153813] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1567.154184] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1567.154514] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1567.154906] env[62619]: INFO nova.compute.manager [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1567.155285] env[62619]: DEBUG oslo.service.loopingcall [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1567.161350] env[62619]: DEBUG nova.compute.manager [-] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1567.161499] env[62619]: DEBUG nova.network.neutron [-] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1567.164882] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52532704-e96f-2008-a860-10cae30f4090, 'name': SearchDatastore_Task, 'duration_secs': 0.022332} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.165165] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.165481] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85/4cd6dafd-4f19-4d0f-8e07-8171a6a71e85.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1567.166143] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32f61572-90ce-4ab8-86e8-5fad62970669 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.174043] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1567.174043] env[62619]: value = "task-1777750" [ 1567.174043] env[62619]: _type = "Task" [ 1567.174043] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.186447] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777750, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.207640] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.314591] env[62619]: DEBUG nova.network.neutron [-] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.336391] env[62619]: DEBUG nova.network.neutron [-] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.463874] env[62619]: DEBUG oslo_concurrency.lockutils [req-fc846502-68a3-49fc-8e20-bf0e31759994 req-769090ac-0a15-492f-a626-ab468bfd59ad service nova] Releasing lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.589055] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777745, 'name': CreateVM_Task, 'duration_secs': 0.907079} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.596967] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1567.598264] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1567.598744] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.599161] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1567.600554] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61374e55-88c8-479f-b57d-ba85acd23d76 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.608157] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777748, 
'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.612419] env[62619]: DEBUG oslo_vmware.api [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1777746, 'name': PowerOnVM_Task, 'duration_secs': 0.70873} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.616037] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 4.026s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.616386] env[62619]: INFO nova.compute.manager [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Migrating [ 1567.624058] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1567.624391] env[62619]: INFO nova.compute.manager [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Took 8.33 seconds to spawn the instance on the hypervisor. 
[ 1567.624678] env[62619]: DEBUG nova.compute.manager [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1567.629898] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.935s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.630423] env[62619]: DEBUG nova.objects.instance [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lazy-loading 'resources' on Instance uuid 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1567.633729] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b4503a-61aa-4de1-9e1f-40f28e2d8a10 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.639270] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1567.639270] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5260bab2-e70b-9c13-c9a1-ec37e0c59304" [ 1567.639270] env[62619]: _type = "Task" [ 1567.639270] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.673363] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5260bab2-e70b-9c13-c9a1-ec37e0c59304, 'name': SearchDatastore_Task, 'duration_secs': 0.0522} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.673658] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.673923] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1567.674571] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1567.674791] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.674916] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1567.676136] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e8bf227-1ef0-43d1-b0ed-0f57f7e36f9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.693611] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777750, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508515} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.693611] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85/4cd6dafd-4f19-4d0f-8e07-8171a6a71e85.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1567.695591] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1567.695591] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1567.695591] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1567.697165] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8dc0be48-317b-454b-9def-5e28a2d27e3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.699865] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f08cfb75-e11f-4dbd-a3df-3379e2263f8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.709196] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1567.709196] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5289dab3-b78d-0b7a-2919-a27cb2f89470" [ 1567.709196] env[62619]: _type = "Task" [ 1567.709196] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.714573] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1567.714573] env[62619]: value = "task-1777751" [ 1567.714573] env[62619]: _type = "Task" [ 1567.714573] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.724225] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5289dab3-b78d-0b7a-2919-a27cb2f89470, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.730214] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777751, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.820234] env[62619]: INFO nova.compute.manager [-] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Took 1.31 seconds to deallocate network for instance. [ 1567.841069] env[62619]: INFO nova.compute.manager [-] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Took 2.12 seconds to deallocate network for instance. [ 1567.893498] env[62619]: DEBUG nova.network.neutron [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Updated VIF entry in instance network info cache for port 61e78327-dbb0-497b-be4b-14a0a1d8a5de. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1567.893986] env[62619]: DEBUG nova.network.neutron [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Updating instance_info_cache with network_info: [{"id": "61e78327-dbb0-497b-be4b-14a0a1d8a5de", "address": "fa:16:3e:a9:78:54", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61e78327-db", "ovs_interfaceid": "61e78327-dbb0-497b-be4b-14a0a1d8a5de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.102215] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777748, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.152045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1568.152045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1568.152045] env[62619]: DEBUG nova.network.neutron [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1568.175937] env[62619]: INFO nova.compute.manager [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Took 40.62 seconds to build instance. [ 1568.230743] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5289dab3-b78d-0b7a-2919-a27cb2f89470, 'name': SearchDatastore_Task, 'duration_secs': 0.012958} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.236491] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777751, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.172154} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.240415] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf255958-caee-41e1-8ceb-4fc59b9e1450 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.244197] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1568.246164] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ada81e-df7d-43be-a3c4-0cca5eba2ea9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.256918] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1568.256918] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5247c9dd-37aa-9dbf-bc66-471d9f25f782" [ 1568.256918] env[62619]: _type = "Task" [ 1568.256918] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.279426] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85/4cd6dafd-4f19-4d0f-8e07-8171a6a71e85.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1568.284435] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3791b99b-d556-4f03-ba43-beb288a2ef4a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.310701] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5247c9dd-37aa-9dbf-bc66-471d9f25f782, 'name': SearchDatastore_Task, 'duration_secs': 0.016414} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.312156] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1568.315159] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 5b1008fb-7c0a-4e12-90f8-119a82ea62f1/5b1008fb-7c0a-4e12-90f8-119a82ea62f1.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1568.315159] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1568.315159] env[62619]: value = "task-1777752" [ 1568.315159] env[62619]: _type = "Task" [ 1568.315159] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.315159] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34fe721f-bf74-4202-9868-b8eefac9cb98 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.332828] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.332828] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1568.332828] env[62619]: value = "task-1777753" [ 1568.332828] env[62619]: _type = "Task" [ 1568.332828] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.332828] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777752, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.332828] env[62619]: DEBUG nova.network.neutron [-] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.349048] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777753, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.349048] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.371077] env[62619]: DEBUG nova.compute.manager [req-90bace58-3a40-4bdb-a49e-5301a40d3855 req-087df1db-5880-4276-9147-07985c03f248 service nova] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Received event network-vif-deleted-35590c26-21c7-47f0-9a37-848ba413367f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1568.375938] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57518dbe-6da4-47f2-a958-0455994e400a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.384471] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d0e327-8aa4-4aad-9a09-b0bf517ddd3f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.414851] env[62619]: DEBUG oslo_concurrency.lockutils [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] Releasing lock "refresh_cache-5b1008fb-7c0a-4e12-90f8-119a82ea62f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1568.415127] env[62619]: DEBUG nova.compute.manager [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Received event network-vif-deleted-7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1568.415308] env[62619]: INFO nova.compute.manager [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Neutron deleted interface 7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575; detaching it from the instance and deleting it from the info cache [ 1568.415601] env[62619]: DEBUG nova.network.neutron [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Updating instance_info_cache with network_info: [{"id": "4603d400-5a47-4629-9915-a5315eeca473", "address": "fa:16:3e:f8:73:07", "network": {"id": "bc8f0b8b-aaf4-4e38-ad5a-c1e19b3f4a04", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-796388189", "subnets": [{"cidr": "192.168.129.0/24", "dns": 
[], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.44", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8311e2dca4814727b91967833796fc66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4603d400-5a", "ovs_interfaceid": "4603d400-5a47-4629-9915-a5315eeca473", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.418062] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439663f5-350e-4c55-8820-62560bf3fb45 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.427134] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea65e06-4821-4e61-b101-43af79fcb874 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.441778] env[62619]: DEBUG nova.compute.provider_tree [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1568.603262] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777748, 'name': CloneVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.678836] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf449f76-8048-4770-9328-6516d3809b2f tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Lock "b1c3c213-599d-4cab-8224-d87467d774c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.951s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.753922] env[62619]: DEBUG nova.network.neutron [-] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.825836] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777752, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.835538] env[62619]: INFO nova.compute.manager [-] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Took 1.70 seconds to deallocate network for instance. [ 1568.857221] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777753, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.922466] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1954fa28-fe01-4cc1-b272-a20b1119db37 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.934164] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49728c8d-05fb-41c5-8b72-6885f80d7c26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.950482] env[62619]: DEBUG nova.scheduler.client.report [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1568.984377] env[62619]: DEBUG nova.compute.manager [req-0ca0e207-61c2-43a9-b615-cb149ea6660f req-80ff8073-ddf7-4179-830f-52c3fcac05ba service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Detach interface failed, port_id=7e9d4c42-dcb9-4beb-8cc0-3eb5f4ec5575, reason: Instance 1f86b805-0fde-4bda-9a94-d440a670e23c could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1569.085796] env[62619]: DEBUG nova.network.neutron [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance_info_cache with network_info: [{"id": "b1ace9af-97b6-4d21-bbe4-972a2a1c1e13", "address": "fa:16:3e:70:d7:d3", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.184", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1ace9af-97", "ovs_interfaceid": "b1ace9af-97b6-4d21-bbe4-972a2a1c1e13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.105117] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777748, 'name': CloneVM_Task, 'duration_secs': 2.033308} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.107338] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Created linked-clone VM from snapshot [ 1569.115336] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96891144-13a3-446b-854f-e2877203ba57 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.128996] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Uploading image d934e05c-1334-4363-aa23-761cfae0b75b {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1569.167838] env[62619]: DEBUG oslo_vmware.rw_handles [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1569.167838] env[62619]: value = "vm-369045" [ 1569.167838] env[62619]: _type = "VirtualMachine" [ 1569.167838] env[62619]: }. 
{{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1569.169272] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-61468f19-b523-4fe9-bdbc-a5965a8e2f3c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.180434] env[62619]: DEBUG oslo_vmware.rw_handles [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lease: (returnval){ [ 1569.180434] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5259e7d8-046f-4afa-3c66-322d366a6aff" [ 1569.180434] env[62619]: _type = "HttpNfcLease" [ 1569.180434] env[62619]: } obtained for exporting VM: (result){ [ 1569.180434] env[62619]: value = "vm-369045" [ 1569.180434] env[62619]: _type = "VirtualMachine" [ 1569.180434] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1569.180434] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the lease: (returnval){ [ 1569.180434] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5259e7d8-046f-4afa-3c66-322d366a6aff" [ 1569.180434] env[62619]: _type = "HttpNfcLease" [ 1569.180434] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1569.183747] env[62619]: DEBUG nova.compute.manager [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1569.198035] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1569.198035] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5259e7d8-046f-4afa-3c66-322d366a6aff" [ 1569.198035] env[62619]: _type = "HttpNfcLease" [ 1569.198035] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1569.206992] env[62619]: INFO nova.compute.manager [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Rebuilding instance [ 1569.213477] env[62619]: DEBUG nova.compute.manager [req-0ced73b2-22d8-46ec-9fe7-a76fdb845e84 req-b5a48df0-2320-4a67-a6c3-e9db59ca582f service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Received event network-vif-deleted-4603d400-5a47-4629-9915-a5315eeca473 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1569.213721] env[62619]: INFO nova.compute.manager [req-0ced73b2-22d8-46ec-9fe7-a76fdb845e84 req-b5a48df0-2320-4a67-a6c3-e9db59ca582f service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Neutron deleted interface 4603d400-5a47-4629-9915-a5315eeca473; detaching it from the instance and deleting it from the info cache [ 1569.214014] env[62619]: DEBUG nova.network.neutron [req-0ced73b2-22d8-46ec-9fe7-a76fdb845e84 req-b5a48df0-2320-4a67-a6c3-e9db59ca582f service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.257423] env[62619]: INFO nova.compute.manager [-] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Took 2.10 seconds to deallocate network for instance. [ 1569.279601] env[62619]: DEBUG nova.compute.manager [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1569.280552] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e248cd-f85b-45b8-a37b-937ba2c50356 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.325655] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777752, 'name': ReconfigVM_Task, 'duration_secs': 0.882917} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.325922] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85/4cd6dafd-4f19-4d0f-8e07-8171a6a71e85.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1569.326655] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f6c937e-c193-423b-b9ca-83933422c52b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.334199] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1569.334199] env[62619]: value = "task-1777755" [ 1569.334199] env[62619]: _type = "Task" [ 1569.334199] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.349695] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.350380] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777755, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.354890] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777753, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.998596} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.355167] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 5b1008fb-7c0a-4e12-90f8-119a82ea62f1/5b1008fb-7c0a-4e12-90f8-119a82ea62f1.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1569.355479] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1569.355857] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e1d4c3a-9c79-4072-80a3-7050788f244c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.366095] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1569.366095] env[62619]: value = "task-1777756" [ 1569.366095] env[62619]: _type = "Task" [ 1569.366095] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.374682] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777756, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.457809] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.828s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.460774] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.020s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.462533] env[62619]: INFO nova.compute.claims [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1569.492688] env[62619]: INFO nova.scheduler.client.report [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Deleted allocations for instance 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec [ 1569.592239] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Releasing lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1569.688498] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1569.688498] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5259e7d8-046f-4afa-3c66-322d366a6aff" [ 1569.688498] env[62619]: _type = "HttpNfcLease" [ 1569.688498] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1569.688797] env[62619]: DEBUG oslo_vmware.rw_handles [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1569.688797] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5259e7d8-046f-4afa-3c66-322d366a6aff" [ 1569.688797] env[62619]: _type = "HttpNfcLease" [ 1569.688797] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1569.689632] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d612699-27fb-4e62-b45e-84d2b336e06d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.705162] env[62619]: DEBUG oslo_vmware.rw_handles [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52506795-db81-512c-2709-7cdd0881a2e9/disk-0.vmdk from lease info. 
{{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1569.706673] env[62619]: DEBUG oslo_vmware.rw_handles [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52506795-db81-512c-2709-7cdd0881a2e9/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1569.762954] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.763246] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-811fb594-9f75-4861-b8b0-b08491cae180 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.767608] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.773486] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39280b0e-2faf-4a79-ad14-e19d3fddb883 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.813187] env[62619]: DEBUG nova.compute.manager [req-0ced73b2-22d8-46ec-9fe7-a76fdb845e84 req-b5a48df0-2320-4a67-a6c3-e9db59ca582f service nova] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Detach interface failed, port_id=4603d400-5a47-4629-9915-a5315eeca473, reason: Instance 1f86b805-0fde-4bda-9a94-d440a670e23c could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1569.813280] env[62619]: DEBUG nova.compute.manager [req-0ced73b2-22d8-46ec-9fe7-a76fdb845e84 req-b5a48df0-2320-4a67-a6c3-e9db59ca582f service nova] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Received event network-vif-deleted-8fe434e6-0075-4cc4-b68c-f76dc00d2001 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1569.813728] env[62619]: DEBUG nova.compute.manager [req-0ced73b2-22d8-46ec-9fe7-a76fdb845e84 req-b5a48df0-2320-4a67-a6c3-e9db59ca582f service nova] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Received event network-vif-deleted-3af5cb8d-ea8b-4677-920d-5e06ecc2843b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1569.846062] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777755, 'name': Rename_Task, 'duration_secs': 0.380544} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.846357] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1569.846597] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3b6be2a-d913-4f6c-bf30-a223db3ae9e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.856023] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1569.856023] env[62619]: value = "task-1777757" [ 1569.856023] env[62619]: _type = "Task" [ 1569.856023] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.859095] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-de746705-07dc-488e-8911-e6a1275fc688 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.868923] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777757, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.880115] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777756, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.232752} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.880470] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1569.881307] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f26a3a6-7d71-4e80-847e-1bfa1d1bc26f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.905276] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 5b1008fb-7c0a-4e12-90f8-119a82ea62f1/5b1008fb-7c0a-4e12-90f8-119a82ea62f1.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1569.907589] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-957e9dfe-0afa-4c27-b42f-d3b63956a9de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.928991] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1569.928991] env[62619]: value = "task-1777758" [ 1569.928991] env[62619]: _type = "Task" [ 1569.928991] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.941727] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777758, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.001700] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e8ed3337-1bf6-4628-ada5-ecd8b7316b7f tempest-ListImageFiltersTestJSON-2130730250 tempest-ListImageFiltersTestJSON-2130730250-project-member] Lock "6dbe4133-a6ba-4bba-9eb9-47a3d2691eec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.783s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.316382] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1570.316894] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98f54df8-35d3-4f14-ab06-4fc67978eabe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.330018] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1570.330018] env[62619]: value = "task-1777759" [ 1570.330018] env[62619]: _type = "Task" [ 1570.330018] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.338013] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777759, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.364724] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777757, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.448379] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777758, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.839596] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777759, 'name': PowerOffVM_Task, 'duration_secs': 0.309277} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.843286] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1570.845675] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1570.845675] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a9ff72-32c5-445f-bd5b-a3addeb26104 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.858184] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1570.862039] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c00e7f0e-6c6e-4f92-8af1-d29b9b82a397 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.870619] env[62619]: DEBUG oslo_vmware.api [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777757, 'name': PowerOnVM_Task, 'duration_secs': 0.786779} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.871040] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1570.872037] env[62619]: INFO nova.compute.manager [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Took 11.30 seconds to spawn the instance on the hypervisor. 
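[Editor's note] The repeated "Task: {'id': task-..., 'name': ...} progress is N%." lines throughout this trace come from the driver's task-polling loop: each vCenter operation (CopyVirtualDisk_Task, Rename_Task, PowerOnVM_Task, ReconfigVM_Task, ...) is submitted asynchronously and then polled until it reports success or error, at which point a "completed successfully" record with duration_secs is logged. The sketch below is a minimal, self-contained illustration of that poll-until-done pattern, not the oslo.vmware implementation; fetch_task_info() is a hypothetical stand-in for the property-collector call that would read the real TaskInfo object from vCenter, and here it is simulated so the example runs on its own.

    # Minimal sketch (assumption: illustrative only, not the driver's code) of the
    # poll-until-complete pattern behind the "_poll_task ... progress is N%" lines.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str              # "queued", "running", "success" or "error"
        progress: int           # 0-100
        error: str | None = None

    def fetch_task_info(step, total_steps=5):
        """Simulated TaskInfo source; a real driver would query vCenter instead."""
        if step >= total_steps:
            return TaskInfo(state="success", progress=100)
        return TaskInfo(state="running", progress=int(100 * step / total_steps))

    def wait_for_task(task_id, poll_interval=0.5):
        """Poll a task until it finishes, logging progress the way the trace above does."""
        step = 0
        while True:
            info = fetch_task_info(step)
            if info.state == "success":
                print(f"Task {task_id} completed successfully.")
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_id} failed: {info.error}")
            print(f"Task {task_id} progress is {info.progress}%.")
            time.sleep(poll_interval)
            step += 1

    if __name__ == "__main__":
        wait_for_task("task-1777757")

In the actual service one such loop runs per outstanding task, which is why progress lines for several task IDs (and for unrelated instances) interleave in the log above.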
[ 1570.872327] env[62619]: DEBUG nova.compute.manager [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1570.873331] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39591458-3f6e-4714-b5e3-d09b144aea03 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.944210] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777758, 'name': ReconfigVM_Task, 'duration_secs': 0.646205} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.944296] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 5b1008fb-7c0a-4e12-90f8-119a82ea62f1/5b1008fb-7c0a-4e12-90f8-119a82ea62f1.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1570.945811] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8376d78f-785b-4d75-98e1-3bc7308731fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.954508] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1570.954508] env[62619]: value = "task-1777761" [ 1570.954508] env[62619]: _type = "Task" [ 1570.954508] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.955790] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1570.955972] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1570.960306] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleting the datastore file [datastore1] ae37cae9-c82e-4775-8a8f-6bbf9108b0bd {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1570.967032] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3199eec9-a396-432c-a205-613777361ed2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.975898] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777761, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.977720] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1570.977720] env[62619]: value = "task-1777762" [ 1570.977720] env[62619]: _type = "Task" [ 1570.977720] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.988330] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777762, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.118062] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cbf1fb5-26ad-420c-8a95-7eece38eea01 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.142280] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance 'dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5' progress to 0 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1571.181018] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcb6051-38fb-47a5-b8cf-f527e6496a48 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.194373] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05358f8e-a8e4-4a96-ac4f-0c4dcb4db02d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.237732] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7781dee7-257a-4aaa-a3ee-009d2bfca8b0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.248865] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5162f03-f7fb-4308-9fc6-282bae5bfe2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.265957] env[62619]: DEBUG nova.compute.provider_tree [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1571.306282] env[62619]: DEBUG nova.compute.manager [req-4e98ce37-58d7-48fe-8773-7ce97e7567c6 req-a5b65284-d6fa-4bfd-bdce-365f8b88c766 service nova] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Received event network-changed-77589fc3-af02-4235-859e-fbf8a8322155 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1571.306490] env[62619]: DEBUG nova.compute.manager [req-4e98ce37-58d7-48fe-8773-7ce97e7567c6 req-a5b65284-d6fa-4bfd-bdce-365f8b88c766 service nova] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Refreshing instance network info cache due to event network-changed-77589fc3-af02-4235-859e-fbf8a8322155. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1571.306813] env[62619]: DEBUG oslo_concurrency.lockutils [req-4e98ce37-58d7-48fe-8773-7ce97e7567c6 req-a5b65284-d6fa-4bfd-bdce-365f8b88c766 service nova] Acquiring lock "refresh_cache-b1c3c213-599d-4cab-8224-d87467d774c9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.306870] env[62619]: DEBUG oslo_concurrency.lockutils [req-4e98ce37-58d7-48fe-8773-7ce97e7567c6 req-a5b65284-d6fa-4bfd-bdce-365f8b88c766 service nova] Acquired lock "refresh_cache-b1c3c213-599d-4cab-8224-d87467d774c9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.307012] env[62619]: DEBUG nova.network.neutron [req-4e98ce37-58d7-48fe-8773-7ce97e7567c6 req-a5b65284-d6fa-4bfd-bdce-365f8b88c766 service nova] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Refreshing network info cache for port 77589fc3-af02-4235-859e-fbf8a8322155 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1571.397874] env[62619]: INFO nova.compute.manager [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Took 36.03 seconds to build instance. [ 1571.466832] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777761, 'name': Rename_Task, 'duration_secs': 0.288145} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.467709] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1571.468416] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7202872-3c3a-4cb6-8008-6045eaaec031 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.476408] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1571.476408] env[62619]: value = "task-1777763" [ 1571.476408] env[62619]: _type = "Task" [ 1571.476408] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.500603] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777762, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253845} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.504714] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1571.504946] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1571.505207] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1571.507695] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777763, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.653652] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1571.654123] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c86c0c01-e7b9-4028-b44a-b9b9efee1890 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.663151] env[62619]: DEBUG oslo_vmware.api [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1571.663151] env[62619]: value = "task-1777764" [ 1571.663151] env[62619]: _type = "Task" [ 1571.663151] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.675930] env[62619]: DEBUG oslo_vmware.api [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777764, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.771122] env[62619]: DEBUG nova.scheduler.client.report [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1571.900175] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cc614837-0f78-4b35-bb77-cc1091485538 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.265s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.989567] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777763, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.179687] env[62619]: DEBUG oslo_vmware.api [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777764, 'name': PowerOffVM_Task, 'duration_secs': 0.405934} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.179988] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1572.180200] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance 'dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5' progress to 17 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1572.229037] env[62619]: DEBUG nova.network.neutron [req-4e98ce37-58d7-48fe-8773-7ce97e7567c6 req-a5b65284-d6fa-4bfd-bdce-365f8b88c766 service nova] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Updated VIF entry in instance network info cache for port 77589fc3-af02-4235-859e-fbf8a8322155. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1572.229662] env[62619]: DEBUG nova.network.neutron [req-4e98ce37-58d7-48fe-8773-7ce97e7567c6 req-a5b65284-d6fa-4bfd-bdce-365f8b88c766 service nova] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Updating instance_info_cache with network_info: [{"id": "77589fc3-af02-4235-859e-fbf8a8322155", "address": "fa:16:3e:49:48:47", "network": {"id": "655928a1-c906-43b7-b355-a21b4db5f18b", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-689245540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3be1c72bfb5c4cfd9c45ab817384e11b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77589fc3-af", "ovs_interfaceid": "77589fc3-af02-4235-859e-fbf8a8322155", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1572.277436] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.817s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.277789] env[62619]: DEBUG nova.compute.manager [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1572.280431] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.316s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.281967] env[62619]: INFO nova.compute.claims [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1572.491439] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777763, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.542676] env[62619]: DEBUG nova.virt.hardware [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1572.543599] env[62619]: DEBUG nova.virt.hardware [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1572.543924] env[62619]: DEBUG nova.virt.hardware [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1572.544223] env[62619]: DEBUG nova.virt.hardware [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1572.544468] env[62619]: DEBUG nova.virt.hardware [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1572.544723] env[62619]: DEBUG nova.virt.hardware [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1572.545062] env[62619]: DEBUG nova.virt.hardware [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1572.545331] env[62619]: DEBUG nova.virt.hardware [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1572.545875] env[62619]: DEBUG nova.virt.hardware [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1572.546227] env[62619]: DEBUG nova.virt.hardware [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1572.546518] env[62619]: DEBUG nova.virt.hardware [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1572.547640] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5dd974e-307c-4c4a-948f-fd182d49e0a1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.557514] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d6e84e-9022-4115-b5bf-0bf7f1ef0b09 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.575402] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:73:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52679af0-12c1-41ec-927a-590d8d45fce0', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1572.584199] env[62619]: DEBUG oslo.service.loopingcall [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1572.584727] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1572.584992] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a23647c9-438b-407d-aab6-42bde63695b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.606701] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1572.606701] env[62619]: value = "task-1777765" [ 1572.606701] env[62619]: _type = "Task" [ 1572.606701] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.615230] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777765, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.687560] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1572.688393] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1572.688393] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1572.688516] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1572.688764] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1572.688929] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1572.689194] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1572.689383] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1572.689668] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 1572.690010] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1572.690144] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1572.696995] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9460a5e-2218-46a8-b404-4839e80ef8d8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.718464] env[62619]: DEBUG oslo_vmware.api [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1572.718464] env[62619]: value = "task-1777766" [ 1572.718464] env[62619]: _type = "Task" [ 1572.718464] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.731331] env[62619]: DEBUG oslo_vmware.api [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777766, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.733116] env[62619]: DEBUG oslo_concurrency.lockutils [req-4e98ce37-58d7-48fe-8773-7ce97e7567c6 req-a5b65284-d6fa-4bfd-bdce-365f8b88c766 service nova] Releasing lock "refresh_cache-b1c3c213-599d-4cab-8224-d87467d774c9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.787719] env[62619]: DEBUG nova.compute.utils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1572.790603] env[62619]: DEBUG nova.compute.manager [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1572.790897] env[62619]: DEBUG nova.network.neutron [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1572.884343] env[62619]: DEBUG nova.policy [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f5836e62ab7440fa798f7bea287572e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed0964fc0c414168b3027730645f7ee8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1572.996859] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777763, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.124663] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777765, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.229819] env[62619]: DEBUG oslo_vmware.api [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777766, 'name': ReconfigVM_Task, 'duration_secs': 0.289854} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.230226] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance 'dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5' progress to 33 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1573.294335] env[62619]: DEBUG nova.compute.manager [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1573.337865] env[62619]: DEBUG nova.network.neutron [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Successfully created port: 2f30eb51-192e-4918-b660-2c50f9d59bb2 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1573.439707] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "769905db-d19a-411f-bb5d-8196056b82aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.440276] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "769905db-d19a-411f-bb5d-8196056b82aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.495900] env[62619]: DEBUG oslo_vmware.api [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777763, 'name': PowerOnVM_Task, 'duration_secs': 1.581126} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.498290] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1573.498570] env[62619]: INFO nova.compute.manager [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Took 11.12 seconds to spawn the instance on the hypervisor. [ 1573.498826] env[62619]: DEBUG nova.compute.manager [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1573.500218] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ff0332-edbb-40af-8f03-c5f399baf38f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.623085] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777765, 'name': CreateVM_Task, 'duration_secs': 0.741964} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.623359] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1573.625052] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.625213] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.625983] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1573.628586] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85094766-5b55-4875-8150-98a9dea31a41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.637639] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1573.637639] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c9bde5-e393-3b87-303c-27e1ad976d03" [ 1573.637639] env[62619]: _type = "Task" [ 1573.637639] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.649576] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c9bde5-e393-3b87-303c-27e1ad976d03, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.744966] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:50:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='026a8c7d-034e-431f-86ad-5b594effd325',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-602383372',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1573.746019] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1573.746019] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1573.746019] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1573.746019] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1573.746199] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1573.746512] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1573.747041] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1573.747336] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Got 1 
possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1573.747599] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1573.750015] env[62619]: DEBUG nova.virt.hardware [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1573.754607] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Reconfiguring VM instance instance-00000032 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1573.758263] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48a252cd-111a-4c51-9af9-057824069dbf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.781033] env[62619]: DEBUG oslo_vmware.api [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1573.781033] env[62619]: value = "task-1777767" [ 1573.781033] env[62619]: _type = "Task" [ 1573.781033] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.791704] env[62619]: DEBUG oslo_vmware.api [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777767, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.944947] env[62619]: DEBUG nova.compute.manager [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1574.029129] env[62619]: INFO nova.compute.manager [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Took 37.54 seconds to build instance. 
[ 1574.037189] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99754bbd-36a5-4fd3-b5d3-d5a117934041 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.045209] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f5ddb9-af47-40b6-bac0-dcca9d6de05f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.087240] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb877c3-64cb-4a56-a2c9-4cbe0f355f7a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.098219] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5486ebc-5f21-42d0-80ce-69bd13e65a3c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.113209] env[62619]: DEBUG nova.compute.provider_tree [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1574.148573] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c9bde5-e393-3b87-303c-27e1ad976d03, 'name': SearchDatastore_Task, 'duration_secs': 0.018651} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.148573] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.148740] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1574.148965] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1574.149120] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1574.149294] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1574.149561] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18d825c6-bf4e-44d9-8c0d-72a3c7f23eba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.159910] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1574.160116] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1574.161710] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4f9209c-5f23-41e7-808f-6e1b5170e9c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.168333] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1574.168333] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5231da30-8b60-7b93-a6c3-b882f3c82fde" [ 1574.168333] env[62619]: _type = "Task" [ 1574.168333] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.176323] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5231da30-8b60-7b93-a6c3-b882f3c82fde, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.289450] env[62619]: DEBUG oslo_vmware.api [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777767, 'name': ReconfigVM_Task, 'duration_secs': 0.269647} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.290531] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Reconfigured VM instance instance-00000032 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1574.290620] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4dd479c-0cb7-4c83-9d53-a12a9a744b3f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.307736] env[62619]: DEBUG nova.compute.manager [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1574.317901] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5/dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1574.318375] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74fd85dc-c237-4c11-a93f-001e2a1cc017 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.336995] env[62619]: DEBUG oslo_vmware.api [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1574.336995] env[62619]: value = "task-1777768" [ 1574.336995] env[62619]: _type = "Task" [ 1574.336995] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.342241] env[62619]: DEBUG nova.virt.hardware [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1574.342994] env[62619]: DEBUG nova.virt.hardware [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1574.342994] env[62619]: DEBUG nova.virt.hardware [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1574.342994] env[62619]: DEBUG nova.virt.hardware [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1574.342994] env[62619]: DEBUG nova.virt.hardware [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 
tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1574.343176] env[62619]: DEBUG nova.virt.hardware [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1574.343558] env[62619]: DEBUG nova.virt.hardware [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1574.343558] env[62619]: DEBUG nova.virt.hardware [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1574.343648] env[62619]: DEBUG nova.virt.hardware [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1574.343752] env[62619]: DEBUG nova.virt.hardware [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1574.343958] env[62619]: DEBUG nova.virt.hardware [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1574.344749] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05633ddc-1729-4fc9-8033-889f556080d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.352426] env[62619]: DEBUG oslo_vmware.api [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777768, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.355506] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594f4818-8971-4295-acb7-a5a676a1003b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.474255] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.532045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5ad4a4eb-4a80-4dec-b693-f3b667ebe74a tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.375s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.616941] env[62619]: DEBUG nova.scheduler.client.report [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1574.679071] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5231da30-8b60-7b93-a6c3-b882f3c82fde, 'name': SearchDatastore_Task, 'duration_secs': 0.023275} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.679884] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1708ab98-459c-4d8f-9901-cdf42db476fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.685996] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1574.685996] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52403422-ca78-277b-9c1b-35ea781e43b9" [ 1574.685996] env[62619]: _type = "Task" [ 1574.685996] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.694380] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52403422-ca78-277b-9c1b-35ea781e43b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.851537] env[62619]: DEBUG oslo_vmware.api [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777768, 'name': ReconfigVM_Task, 'duration_secs': 0.320269} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.851537] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Reconfigured VM instance instance-00000032 to attach disk [datastore1] dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5/dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1574.851537] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance 'dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5' progress to 50 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1575.097834] env[62619]: DEBUG nova.compute.manager [req-fb4a7f08-e510-4187-9b4c-804451ce02ac req-89ef0cd5-3556-44fe-a403-986ba4f81290 service nova] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Received event network-vif-plugged-2f30eb51-192e-4918-b660-2c50f9d59bb2 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1575.100179] env[62619]: DEBUG oslo_concurrency.lockutils [req-fb4a7f08-e510-4187-9b4c-804451ce02ac req-89ef0cd5-3556-44fe-a403-986ba4f81290 service nova] Acquiring lock "6cd2f6e6-79a4-41be-a349-b504028ecab4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.100179] env[62619]: DEBUG oslo_concurrency.lockutils [req-fb4a7f08-e510-4187-9b4c-804451ce02ac req-89ef0cd5-3556-44fe-a403-986ba4f81290 service nova] Lock "6cd2f6e6-79a4-41be-a349-b504028ecab4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.100179] env[62619]: DEBUG oslo_concurrency.lockutils [req-fb4a7f08-e510-4187-9b4c-804451ce02ac req-89ef0cd5-3556-44fe-a403-986ba4f81290 service nova] Lock "6cd2f6e6-79a4-41be-a349-b504028ecab4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.100179] env[62619]: DEBUG nova.compute.manager [req-fb4a7f08-e510-4187-9b4c-804451ce02ac 
req-89ef0cd5-3556-44fe-a403-986ba4f81290 service nova] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] No waiting events found dispatching network-vif-plugged-2f30eb51-192e-4918-b660-2c50f9d59bb2 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1575.100179] env[62619]: WARNING nova.compute.manager [req-fb4a7f08-e510-4187-9b4c-804451ce02ac req-89ef0cd5-3556-44fe-a403-986ba4f81290 service nova] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Received unexpected event network-vif-plugged-2f30eb51-192e-4918-b660-2c50f9d59bb2 for instance with vm_state building and task_state spawning. [ 1575.127021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.841s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.127021] env[62619]: DEBUG nova.compute.manager [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1575.127021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.927s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.127021] env[62619]: INFO nova.compute.claims [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1575.204586] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6b417693-b245-4740-8ee1-775afe486b50 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.208024] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6b417693-b245-4740-8ee1-775afe486b50 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.208024] env[62619]: DEBUG nova.compute.manager [None req-6b417693-b245-4740-8ee1-775afe486b50 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1575.208024] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 
tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52403422-ca78-277b-9c1b-35ea781e43b9, 'name': SearchDatastore_Task, 'duration_secs': 0.010425} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.208024] env[62619]: DEBUG nova.network.neutron [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Successfully updated port: 2f30eb51-192e-4918-b660-2c50f9d59bb2 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1575.209595] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741ceb4f-0eb9-413b-a6c7-5cd4f5c6a26f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.212466] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1575.212713] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ae37cae9-c82e-4775-8a8f-6bbf9108b0bd/ae37cae9-c82e-4775-8a8f-6bbf9108b0bd.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1575.213293] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7eaf6519-fa81-4f4e-b5aa-1ac8906a3ce8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.220569] env[62619]: DEBUG nova.compute.manager [None req-6b417693-b245-4740-8ee1-775afe486b50 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1575.221239] env[62619]: DEBUG nova.objects.instance [None req-6b417693-b245-4740-8ee1-775afe486b50 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lazy-loading 'flavor' on Instance uuid 5b1008fb-7c0a-4e12-90f8-119a82ea62f1 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1575.225669] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1575.225669] env[62619]: value = "task-1777769" [ 1575.225669] env[62619]: _type = "Task" [ 1575.225669] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.236242] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777769, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.362263] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8be7d1c-c7f8-4418-9cf3-f9c791972b0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.385731] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f1ac6d-92e6-4170-967f-78b3c11b968b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.404882] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance 'dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5' progress to 67 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1575.635803] env[62619]: DEBUG nova.compute.utils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1575.637257] env[62619]: DEBUG nova.compute.manager [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1575.638935] env[62619]: DEBUG nova.network.neutron [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1575.710198] env[62619]: DEBUG nova.policy [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0cb2cfd0e64f4035846dbae1a2d3174b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4a61b4c4b2b42a1933ef647b146b530', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1575.714548] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "refresh_cache-6cd2f6e6-79a4-41be-a349-b504028ecab4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.714730] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquired lock "refresh_cache-6cd2f6e6-79a4-41be-a349-b504028ecab4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.714901] env[62619]: DEBUG nova.network.neutron [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1575.741602] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777769, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.958835] env[62619]: DEBUG nova.network.neutron [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Port b1ace9af-97b6-4d21-bbe4-972a2a1c1e13 binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1576.141413] env[62619]: DEBUG nova.compute.manager [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1576.240914] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b417693-b245-4740-8ee1-775afe486b50 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1576.240914] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e4cfdec1-d706-4df4-9baf-496893ee87d8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.247061] env[62619]: DEBUG nova.network.neutron [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Successfully created port: 1535065e-6e20-4745-957d-4e77e36ba2d3 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1576.252532] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777769, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.62616} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.253830] env[62619]: DEBUG nova.network.neutron [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1576.256688] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] ae37cae9-c82e-4775-8a8f-6bbf9108b0bd/ae37cae9-c82e-4775-8a8f-6bbf9108b0bd.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1576.257126] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1576.261601] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-700586bd-482e-4bc3-96b0-3e49cddb3208 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.265068] env[62619]: DEBUG oslo_vmware.api [None req-6b417693-b245-4740-8ee1-775afe486b50 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1576.265068] env[62619]: value = "task-1777770" [ 1576.265068] env[62619]: _type = "Task" [ 1576.265068] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.272892] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1576.272892] env[62619]: value = "task-1777771" [ 1576.272892] env[62619]: _type = "Task" [ 1576.272892] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.277731] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Acquiring lock "7ee5f09f-e27b-4373-88ce-8cff2f55a2b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1576.278054] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Lock "7ee5f09f-e27b-4373-88ce-8cff2f55a2b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1576.291400] env[62619]: DEBUG oslo_vmware.api [None req-6b417693-b245-4740-8ee1-775afe486b50 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777770, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.307021] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777771, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.449944] env[62619]: DEBUG nova.network.neutron [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Updating instance_info_cache with network_info: [{"id": "2f30eb51-192e-4918-b660-2c50f9d59bb2", "address": "fa:16:3e:84:f0:29", "network": {"id": "15f89bb2-20f5-4ac8-8688-bfeed19bc7c0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1093042133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed0964fc0c414168b3027730645f7ee8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f30eb51-19", "ovs_interfaceid": "2f30eb51-192e-4918-b660-2c50f9d59bb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.761536] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f25045e-b042-44fd-b8c8-990fe4aca9e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.773137] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3075a656-4d35-4bc9-804a-dec2b7391b7a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.780975] env[62619]: DEBUG oslo_vmware.api [None req-6b417693-b245-4740-8ee1-775afe486b50 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777770, 'name': PowerOffVM_Task, 'duration_secs': 0.243653} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.785178] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b417693-b245-4740-8ee1-775afe486b50 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1576.785428] env[62619]: DEBUG nova.compute.manager [None req-6b417693-b245-4740-8ee1-775afe486b50 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1576.813078] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f565b56-e72f-42d7-aa27-f32a945aec73 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.816150] env[62619]: DEBUG nova.compute.manager [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1576.821063] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37eb00f6-2591-4bbf-a578-366bd595f4a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.827026] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777771, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075817} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.827591] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1576.828487] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df21b9e-d173-4c4b-965a-dd21887d4ad8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.838269] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6348d443-2478-4333-9480-57766705da41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.861972] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] ae37cae9-c82e-4775-8a8f-6bbf9108b0bd/ae37cae9-c82e-4775-8a8f-6bbf9108b0bd.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1576.863480] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cccde74a-60f8-4f9b-91e5-c10df86db25f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.886594] env[62619]: DEBUG nova.compute.provider_tree [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1576.893863] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1576.893863] env[62619]: value = "task-1777772" [ 1576.893863] env[62619]: _type = "Task" [ 1576.893863] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.902880] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777772, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.953097] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Releasing lock "refresh_cache-6cd2f6e6-79a4-41be-a349-b504028ecab4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.953459] env[62619]: DEBUG nova.compute.manager [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Instance network_info: |[{"id": "2f30eb51-192e-4918-b660-2c50f9d59bb2", "address": "fa:16:3e:84:f0:29", "network": {"id": "15f89bb2-20f5-4ac8-8688-bfeed19bc7c0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1093042133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed0964fc0c414168b3027730645f7ee8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f30eb51-19", "ovs_interfaceid": "2f30eb51-192e-4918-b660-2c50f9d59bb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1576.953916] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:f0:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2f30eb51-192e-4918-b660-2c50f9d59bb2', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1576.961276] env[62619]: DEBUG oslo.service.loopingcall [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1576.961503] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1576.961728] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41a71943-5c9b-48fd-b4b3-a8196a4c2615 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.990063] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1576.990355] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1576.990477] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1576.992816] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1576.992816] env[62619]: value = "task-1777773" [ 1576.992816] env[62619]: _type = "Task" [ 1576.992816] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.000847] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777773, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.154331] env[62619]: DEBUG nova.compute.manager [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1577.182994] env[62619]: DEBUG nova.virt.hardware [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=<?>,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-11T22:34:16Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1577.183278] env[62619]: DEBUG nova.virt.hardware [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1577.183459] env[62619]: DEBUG nova.virt.hardware [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1577.183652] env[62619]: DEBUG nova.virt.hardware [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1577.183795] env[62619]: DEBUG nova.virt.hardware [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1577.184044] env[62619]: DEBUG nova.virt.hardware [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1577.184279] env[62619]: DEBUG nova.virt.hardware [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1577.184460] env[62619]: DEBUG nova.virt.hardware [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1577.184635] env[62619]: DEBUG 
nova.virt.hardware [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1577.184796] env[62619]: DEBUG nova.virt.hardware [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1577.185090] env[62619]: DEBUG nova.virt.hardware [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1577.185981] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50aa5e0a-3c90-4072-98ff-e2431d80e447 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.194367] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3ce483-f5dd-451e-8b7b-b351e6fea81f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.341372] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.356724] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6b417693-b245-4740-8ee1-775afe486b50 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 2.151s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.390306] env[62619]: DEBUG nova.scheduler.client.report [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1577.404988] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777772, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.427805] env[62619]: DEBUG nova.compute.manager [req-a94891c0-6272-4153-8813-640f80e46938 req-07912106-9922-48fd-b35c-572d86ade5f5 service nova] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Received event network-changed-2f30eb51-192e-4918-b660-2c50f9d59bb2 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1577.428210] env[62619]: DEBUG nova.compute.manager [req-a94891c0-6272-4153-8813-640f80e46938 req-07912106-9922-48fd-b35c-572d86ade5f5 service nova] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Refreshing instance network info cache due to event network-changed-2f30eb51-192e-4918-b660-2c50f9d59bb2. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1577.429017] env[62619]: DEBUG oslo_concurrency.lockutils [req-a94891c0-6272-4153-8813-640f80e46938 req-07912106-9922-48fd-b35c-572d86ade5f5 service nova] Acquiring lock "refresh_cache-6cd2f6e6-79a4-41be-a349-b504028ecab4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.429017] env[62619]: DEBUG oslo_concurrency.lockutils [req-a94891c0-6272-4153-8813-640f80e46938 req-07912106-9922-48fd-b35c-572d86ade5f5 service nova] Acquired lock "refresh_cache-6cd2f6e6-79a4-41be-a349-b504028ecab4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.429017] env[62619]: DEBUG nova.network.neutron [req-a94891c0-6272-4153-8813-640f80e46938 req-07912106-9922-48fd-b35c-572d86ade5f5 service nova] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Refreshing network info cache for port 2f30eb51-192e-4918-b660-2c50f9d59bb2 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1577.503300] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777773, 'name': CreateVM_Task, 'duration_secs': 0.382852} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.504312] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1577.504836] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.505013] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.505332] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1577.505835] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b1da988-dad7-446e-94f6-1991efee35cd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.511365] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1577.511365] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525779d6-6b95-ac73-d127-b9cdc03c54f3" [ 1577.511365] env[62619]: _type = "Task" [ 1577.511365] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.519520] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525779d6-6b95-ac73-d127-b9cdc03c54f3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.895916] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.770s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.896528] env[62619]: DEBUG nova.compute.manager [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1577.900239] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.264s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.902970] env[62619]: INFO nova.compute.claims [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1577.915219] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777772, 'name': ReconfigVM_Task, 'duration_secs': 0.735729} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.915507] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Reconfigured VM instance instance-00000038 to attach disk [datastore1] ae37cae9-c82e-4775-8a8f-6bbf9108b0bd/ae37cae9-c82e-4775-8a8f-6bbf9108b0bd.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1577.916468] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9b102839-ed13-4722-b1b9-b7b646e65e4e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.922645] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1577.922645] env[62619]: value = "task-1777774" [ 1577.922645] env[62619]: _type = "Task" [ 1577.922645] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.930753] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777774, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.023686] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525779d6-6b95-ac73-d127-b9cdc03c54f3, 'name': SearchDatastore_Task, 'duration_secs': 0.010858} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.024035] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.024295] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1578.024543] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.024688] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.024896] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1578.025197] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9afe2c26-ddad-4221-89bb-04af912e0a2b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.035865] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 
tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1578.036050] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1578.036794] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f32e6e67-e98a-4980-983e-856ac8b08fe8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.042186] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1578.042186] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524b93ff-73cc-d8e1-107e-8b0edabfedc0" [ 1578.042186] env[62619]: _type = "Task" [ 1578.042186] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.050696] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524b93ff-73cc-d8e1-107e-8b0edabfedc0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.084989] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.085249] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.085429] env[62619]: DEBUG nova.network.neutron [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1578.117538] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1578.117807] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1578.178043] env[62619]: DEBUG nova.network.neutron [req-a94891c0-6272-4153-8813-640f80e46938 req-07912106-9922-48fd-b35c-572d86ade5f5 service nova] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Updated VIF entry in instance network info cache for port 2f30eb51-192e-4918-b660-2c50f9d59bb2. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1578.178422] env[62619]: DEBUG nova.network.neutron [req-a94891c0-6272-4153-8813-640f80e46938 req-07912106-9922-48fd-b35c-572d86ade5f5 service nova] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Updating instance_info_cache with network_info: [{"id": "2f30eb51-192e-4918-b660-2c50f9d59bb2", "address": "fa:16:3e:84:f0:29", "network": {"id": "15f89bb2-20f5-4ac8-8688-bfeed19bc7c0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1093042133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed0964fc0c414168b3027730645f7ee8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f30eb51-19", "ovs_interfaceid": "2f30eb51-192e-4918-b660-2c50f9d59bb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.286485] env[62619]: INFO nova.compute.manager [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Rebuilding instance [ 1578.332736] env[62619]: DEBUG nova.compute.manager [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1578.333623] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3cd06a7-7f7d-4f54-b5a4-3e383a24e054 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.348092] env[62619]: DEBUG nova.network.neutron [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Successfully updated port: 1535065e-6e20-4745-957d-4e77e36ba2d3 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1578.407270] env[62619]: DEBUG nova.compute.utils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 1578.408992] env[62619]: DEBUG nova.compute.manager [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1578.409192] env[62619]: DEBUG nova.network.neutron [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1578.432470] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777774, 'name': Rename_Task, 'duration_secs': 0.232096} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.432740] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1578.432979] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5bb2c68d-6fa0-414a-8d5d-ba6c1bca6e46 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.438781] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1578.438781] env[62619]: value = "task-1777775" [ 1578.438781] env[62619]: _type = "Task" [ 1578.438781] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.446677] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777775, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.479062] env[62619]: DEBUG nova.policy [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ca01c14f3a3a48abbeefec12aefa2b6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cefd8de3a0c84548b021299e27a3cab7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1578.552311] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524b93ff-73cc-d8e1-107e-8b0edabfedc0, 'name': SearchDatastore_Task, 'duration_secs': 0.01021} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.553126] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48f58ad7-9e82-4a60-ab02-a430da85d784 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.558387] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1578.558387] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d69d4e-9354-4051-43a3-062635190c09" [ 1578.558387] env[62619]: _type = "Task" [ 1578.558387] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.566188] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d69d4e-9354-4051-43a3-062635190c09, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.628248] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1578.628497] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1578.681142] env[62619]: DEBUG oslo_concurrency.lockutils [req-a94891c0-6272-4153-8813-640f80e46938 req-07912106-9922-48fd-b35c-572d86ade5f5 service nova] Releasing lock "refresh_cache-6cd2f6e6-79a4-41be-a349-b504028ecab4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.823751] env[62619]: DEBUG nova.network.neutron [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance_info_cache with network_info: [{"id": "b1ace9af-97b6-4d21-bbe4-972a2a1c1e13", "address": "fa:16:3e:70:d7:d3", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.184", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1ace9af-97", "ovs_interfaceid": "b1ace9af-97b6-4d21-bbe4-972a2a1c1e13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.851906] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "refresh_cache-c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.851906] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquired lock "refresh_cache-c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.851906] env[62619]: DEBUG nova.network.neutron [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Building network info cache for instance 
{{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1578.911920] env[62619]: DEBUG nova.compute.manager [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1578.960152] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777775, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.015374] env[62619]: DEBUG nova.network.neutron [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Successfully created port: cab4b941-1153-4182-984e-3286233288af {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1579.072232] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d69d4e-9354-4051-43a3-062635190c09, 'name': SearchDatastore_Task, 'duration_secs': 0.020842} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.072500] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.072797] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 6cd2f6e6-79a4-41be-a349-b504028ecab4/6cd2f6e6-79a4-41be-a349-b504028ecab4.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1579.073045] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9aacdc20-4bc6-4b49-9946-5c54b107373b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.080456] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1579.080456] env[62619]: value = "task-1777776" [ 1579.080456] env[62619]: _type = "Task" [ 1579.080456] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.097487] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777776, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.164063] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "refresh_cache-4763e489-5aeb-4dc0-b327-b79a55afdfe3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.164236] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquired lock "refresh_cache-4763e489-5aeb-4dc0-b327-b79a55afdfe3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.164391] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Forcefully refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1579.328701] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Releasing lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.354683] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1579.356785] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b28f3ee-d818-4a72-9fb4-80e36e934fd1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.365800] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1579.365800] env[62619]: value = "task-1777777" [ 1579.365800] env[62619]: _type = "Task" [ 1579.365800] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.374112] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777777, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.395870] env[62619]: DEBUG nova.network.neutron [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1579.405518] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-157ad3ee-6eb7-4ae8-8e35-cacf63ed8abe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.412909] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966e1efa-21c3-45b4-9846-daf060a23efa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.454823] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f69a66-8a80-41d0-b73f-c247990e1269 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.460159] env[62619]: DEBUG nova.compute.manager [req-8ce298c0-ef70-4d67-beac-495d47e8fd9b req-308c70b7-ae47-4e8e-bd1e-dcbaf075f2cb service nova] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Received event network-vif-plugged-1535065e-6e20-4745-957d-4e77e36ba2d3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1579.460456] env[62619]: DEBUG oslo_concurrency.lockutils [req-8ce298c0-ef70-4d67-beac-495d47e8fd9b req-308c70b7-ae47-4e8e-bd1e-dcbaf075f2cb service nova] Acquiring lock "c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.460571] env[62619]: DEBUG oslo_concurrency.lockutils [req-8ce298c0-ef70-4d67-beac-495d47e8fd9b req-308c70b7-ae47-4e8e-bd1e-dcbaf075f2cb service nova] Lock "c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.460772] env[62619]: DEBUG oslo_concurrency.lockutils [req-8ce298c0-ef70-4d67-beac-495d47e8fd9b req-308c70b7-ae47-4e8e-bd1e-dcbaf075f2cb service nova] Lock "c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.460899] env[62619]: DEBUG nova.compute.manager [req-8ce298c0-ef70-4d67-beac-495d47e8fd9b req-308c70b7-ae47-4e8e-bd1e-dcbaf075f2cb service nova] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] No waiting events found dispatching network-vif-plugged-1535065e-6e20-4745-957d-4e77e36ba2d3 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1579.461229] env[62619]: WARNING nova.compute.manager [req-8ce298c0-ef70-4d67-beac-495d47e8fd9b req-308c70b7-ae47-4e8e-bd1e-dcbaf075f2cb service nova] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Received unexpected event network-vif-plugged-1535065e-6e20-4745-957d-4e77e36ba2d3 for instance with vm_state building and task_state spawning. 
[ 1579.461229] env[62619]: DEBUG nova.compute.manager [req-8ce298c0-ef70-4d67-beac-495d47e8fd9b req-308c70b7-ae47-4e8e-bd1e-dcbaf075f2cb service nova] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Received event network-changed-1535065e-6e20-4745-957d-4e77e36ba2d3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1579.461349] env[62619]: DEBUG nova.compute.manager [req-8ce298c0-ef70-4d67-beac-495d47e8fd9b req-308c70b7-ae47-4e8e-bd1e-dcbaf075f2cb service nova] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Refreshing instance network info cache due to event network-changed-1535065e-6e20-4745-957d-4e77e36ba2d3. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1579.461502] env[62619]: DEBUG oslo_concurrency.lockutils [req-8ce298c0-ef70-4d67-beac-495d47e8fd9b req-308c70b7-ae47-4e8e-bd1e-dcbaf075f2cb service nova] Acquiring lock "refresh_cache-c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.467210] env[62619]: DEBUG oslo_vmware.api [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777775, 'name': PowerOnVM_Task, 'duration_secs': 0.671409} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.469058] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0da3d8f-8084-4e0a-9c7f-52fb180ca5d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.473469] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1579.473699] env[62619]: DEBUG nova.compute.manager [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1579.475054] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76eb5f54-17b2-4460-b17f-d9f37a1c1aa7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.488232] env[62619]: DEBUG nova.compute.provider_tree [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1579.573796] env[62619]: DEBUG nova.network.neutron [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Updating instance_info_cache with network_info: [{"id": "1535065e-6e20-4745-957d-4e77e36ba2d3", "address": "fa:16:3e:0e:a5:16", "network": {"id": 
"f93b590d-39d9-42a4-8c3f-d05fa75fdf10", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1677764334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4a61b4c4b2b42a1933ef647b146b530", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1535065e-6e", "ovs_interfaceid": "1535065e-6e20-4745-957d-4e77e36ba2d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1579.592683] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777776, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.854592] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7be832-a07e-4798-b63d-ac5ac2e1a2b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.876673] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36d3297-3570-481f-a717-c7c018d3eb8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.883729] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance 'dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5' progress to 83 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1579.890582] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1579.890820] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1579.891648] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb53041-3eb1-44db-94c2-b01308797d3b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.898348] env[62619]: DEBUG 
nova.virt.vmwareapi.vmops [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1579.898582] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d43d3f5e-e72f-4bf0-b1f2-a4bb25bfc62f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.923515] env[62619]: DEBUG oslo_vmware.rw_handles [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52506795-db81-512c-2709-7cdd0881a2e9/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1579.924505] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a86b6b6-7d6d-4e80-9c1a-8545898dec9e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.930622] env[62619]: DEBUG oslo_vmware.rw_handles [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52506795-db81-512c-2709-7cdd0881a2e9/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1579.930859] env[62619]: ERROR oslo_vmware.rw_handles [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52506795-db81-512c-2709-7cdd0881a2e9/disk-0.vmdk due to incomplete transfer. [ 1579.931176] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6a37a512-599f-42ff-b316-49a5fe5b84c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.938864] env[62619]: DEBUG oslo_vmware.rw_handles [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52506795-db81-512c-2709-7cdd0881a2e9/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1579.939068] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Uploaded image d934e05c-1334-4363-aa23-761cfae0b75b to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1579.941879] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1579.942314] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-61660965-c717-48e5-a34f-777f06e2ac3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.950073] env[62619]: DEBUG nova.compute.manager [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1579.952076] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1579.952076] env[62619]: value = "task-1777779" [ 1579.952076] env[62619]: _type = "Task" [ 1579.952076] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.961082] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777779, 'name': Destroy_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.974201] env[62619]: DEBUG nova.virt.hardware [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1579.974201] env[62619]: DEBUG nova.virt.hardware [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1579.974201] env[62619]: DEBUG nova.virt.hardware [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1579.974201] env[62619]: DEBUG nova.virt.hardware [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1579.974201] env[62619]: DEBUG nova.virt.hardware [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1579.974201] env[62619]: DEBUG nova.virt.hardware [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1579.974201] env[62619]: DEBUG nova.virt.hardware [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1579.974201] env[62619]: DEBUG nova.virt.hardware [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1579.974201] 
env[62619]: DEBUG nova.virt.hardware [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1579.974201] env[62619]: DEBUG nova.virt.hardware [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1579.974201] env[62619]: DEBUG nova.virt.hardware [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1579.975469] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe2aaf57-23ac-4cb5-8e8b-bfef225292dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.984895] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f62d0a-71ad-4223-8cb7-459fea6a8077 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.002051] env[62619]: DEBUG nova.scheduler.client.report [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1580.011502] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.055039] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1580.055313] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1580.056984] env[62619]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleting the datastore file [datastore1] 5b1008fb-7c0a-4e12-90f8-119a82ea62f1 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1580.056984] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3b65476-3807-498b-8cba-ebf9d9ae1a44 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.062956] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1580.062956] env[62619]: value = "task-1777780" [ 1580.062956] env[62619]: _type = "Task" [ 1580.062956] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.071812] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777780, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.077106] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Releasing lock "refresh_cache-c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.078292] env[62619]: DEBUG nova.compute.manager [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Instance network_info: |[{"id": "1535065e-6e20-4745-957d-4e77e36ba2d3", "address": "fa:16:3e:0e:a5:16", "network": {"id": "f93b590d-39d9-42a4-8c3f-d05fa75fdf10", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1677764334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4a61b4c4b2b42a1933ef647b146b530", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1535065e-6e", "ovs_interfaceid": "1535065e-6e20-4745-957d-4e77e36ba2d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1580.078292] env[62619]: DEBUG oslo_concurrency.lockutils [req-8ce298c0-ef70-4d67-beac-495d47e8fd9b req-308c70b7-ae47-4e8e-bd1e-dcbaf075f2cb service nova] Acquired lock 
"refresh_cache-c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1580.078292] env[62619]: DEBUG nova.network.neutron [req-8ce298c0-ef70-4d67-beac-495d47e8fd9b req-308c70b7-ae47-4e8e-bd1e-dcbaf075f2cb service nova] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Refreshing network info cache for port 1535065e-6e20-4745-957d-4e77e36ba2d3 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1580.079086] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:a5:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db068f71-08cc-42d4-8ab6-17134c1585e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1535065e-6e20-4745-957d-4e77e36ba2d3', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1580.086739] env[62619]: DEBUG oslo.service.loopingcall [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1580.087601] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1580.090993] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7e50c77-0d9d-4425-a672-d2b0b5b2498d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.113588] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777776, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.637249} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.114940] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 6cd2f6e6-79a4-41be-a349-b504028ecab4/6cd2f6e6-79a4-41be-a349-b504028ecab4.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1580.115175] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1580.115399] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1580.115399] env[62619]: value = "task-1777781" [ 1580.115399] env[62619]: _type = "Task" [ 1580.115399] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.115573] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e90a62b-32fb-4d97-a477-361661f8fb20 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.126124] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777781, 'name': CreateVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.127647] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1580.127647] env[62619]: value = "task-1777782" [ 1580.127647] env[62619]: _type = "Task" [ 1580.127647] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.135678] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777782, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.393192] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1580.394258] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Updating instance_info_cache with network_info: [{"id": "b80ccdea-ed2b-4257-8c43-ae663d8b8bbc", "address": "fa:16:3e:2d:ca:43", "network": {"id": "c4f36a51-ffa2-46e8-86bf-d0e38a92a350", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1409038918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e471e7b32b154c1db2eac990fd11e539", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb80ccdea-ed", "ovs_interfaceid": "b80ccdea-ed2b-4257-8c43-ae663d8b8bbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.395623] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f972a1ef-29ee-44ee-98e3-501119582aa2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.403074] env[62619]: DEBUG oslo_vmware.api [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1580.403074] env[62619]: value = "task-1777783" [ 1580.403074] env[62619]: _type = "Task" [ 1580.403074] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.418587] env[62619]: DEBUG oslo_vmware.api [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777783, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.463095] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777779, 'name': Destroy_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.511234] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.611s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.511774] env[62619]: DEBUG nova.compute.manager [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1580.514883] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.825s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.515059] env[62619]: DEBUG nova.objects.instance [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1580.574101] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777780, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241014} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.574405] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1580.574575] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1580.574748] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1580.630059] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777781, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.643113] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777782, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.254208} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.643113] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1580.644072] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfae2d05-6cf3-4be1-9017-6e5bf1b2a428 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.671384] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 6cd2f6e6-79a4-41be-a349-b504028ecab4/6cd2f6e6-79a4-41be-a349-b504028ecab4.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1580.671916] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-705a464e-5dd2-4a4a-921e-8c1b60d973e8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.701677] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1580.701677] env[62619]: value = "task-1777784" [ 1580.701677] env[62619]: _type = "Task" [ 1580.701677] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.713487] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777784, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.813314] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "ae37cae9-c82e-4775-8a8f-6bbf9108b0bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.813635] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "ae37cae9-c82e-4775-8a8f-6bbf9108b0bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.813884] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "ae37cae9-c82e-4775-8a8f-6bbf9108b0bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.814451] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "ae37cae9-c82e-4775-8a8f-6bbf9108b0bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.814775] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "ae37cae9-c82e-4775-8a8f-6bbf9108b0bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.817370] env[62619]: INFO nova.compute.manager [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Terminating instance [ 1580.840734] env[62619]: DEBUG nova.network.neutron [req-8ce298c0-ef70-4d67-beac-495d47e8fd9b req-308c70b7-ae47-4e8e-bd1e-dcbaf075f2cb service nova] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Updated VIF entry in instance network info cache for port 1535065e-6e20-4745-957d-4e77e36ba2d3. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1580.841181] env[62619]: DEBUG nova.network.neutron [req-8ce298c0-ef70-4d67-beac-495d47e8fd9b req-308c70b7-ae47-4e8e-bd1e-dcbaf075f2cb service nova] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Updating instance_info_cache with network_info: [{"id": "1535065e-6e20-4745-957d-4e77e36ba2d3", "address": "fa:16:3e:0e:a5:16", "network": {"id": "f93b590d-39d9-42a4-8c3f-d05fa75fdf10", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1677764334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4a61b4c4b2b42a1933ef647b146b530", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1535065e-6e", "ovs_interfaceid": "1535065e-6e20-4745-957d-4e77e36ba2d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.898272] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Releasing lock "refresh_cache-4763e489-5aeb-4dc0-b327-b79a55afdfe3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.898498] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 1580.898713] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1580.898868] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1580.899021] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1580.899185] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1580.899327] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task 
ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1580.899509] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1580.899677] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1580.899835] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1580.912350] env[62619]: DEBUG oslo_vmware.api [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777783, 'name': PowerOnVM_Task, 'duration_secs': 0.3918} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.913289] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1580.913477] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-aef679e9-79c4-4670-b17e-7f58aacbac6a tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance 'dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5' progress to 100 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1580.965025] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777779, 'name': Destroy_Task, 'duration_secs': 0.971537} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.965025] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Destroyed the VM [ 1580.965025] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1580.965025] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6bc6fbee-85de-40a6-8890-d464ca5d894c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.971306] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1580.971306] env[62619]: value = "task-1777785" [ 1580.971306] env[62619]: _type = "Task" [ 1580.971306] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.980543] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777785, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.010939] env[62619]: DEBUG nova.network.neutron [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Successfully updated port: cab4b941-1153-4182-984e-3286233288af {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1581.019248] env[62619]: DEBUG nova.compute.utils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1581.020901] env[62619]: DEBUG nova.compute.manager [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1581.020901] env[62619]: DEBUG nova.network.neutron [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1581.102855] env[62619]: DEBUG nova.policy [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f5869dd1daa4009af5ae44a195fb8a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b7dea13f34f140dd98291849f66720ad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1581.131020] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777781, 'name': CreateVM_Task, 'duration_secs': 0.775456} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.131020] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1581.131020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.131020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.131020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1581.131020] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7481efed-08af-422d-a162-8448a288b8af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.138064] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1581.138064] env[62619]: value = 
"session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c9d0fa-1a03-0f27-cfde-67e509490edb" [ 1581.138064] env[62619]: _type = "Task" [ 1581.138064] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.147117] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c9d0fa-1a03-0f27-cfde-67e509490edb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.212324] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777784, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.321166] env[62619]: DEBUG nova.compute.manager [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1581.321601] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1581.322585] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e9cad3-b703-4b6b-b466-841efadad7ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.332018] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1581.332018] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66472e96-a3fd-445d-b475-134e55d8fda8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.336829] env[62619]: DEBUG oslo_vmware.api [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1581.336829] env[62619]: value = "task-1777786" [ 1581.336829] env[62619]: _type = "Task" [ 1581.336829] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.346260] env[62619]: DEBUG oslo_concurrency.lockutils [req-8ce298c0-ef70-4d67-beac-495d47e8fd9b req-308c70b7-ae47-4e8e-bd1e-dcbaf075f2cb service nova] Releasing lock "refresh_cache-c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.346991] env[62619]: DEBUG oslo_vmware.api [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777786, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.403101] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.482805] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777785, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.514271] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Acquiring lock "refresh_cache-4b2e9965-cbd4-4d98-b003-436b4a8c913e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.514476] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Acquired lock "refresh_cache-4b2e9965-cbd4-4d98-b003-436b4a8c913e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.514636] env[62619]: DEBUG nova.network.neutron [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1581.524066] env[62619]: DEBUG nova.compute.utils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1581.532341] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e531891d-87dd-474d-9284-19835c49e128 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1581.532386] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 
tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.078s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1581.532948] env[62619]: DEBUG nova.objects.instance [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lazy-loading 'resources' on Instance uuid fb231b38-950e-4c86-bfe5-4c10a304910f {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1581.623448] env[62619]: DEBUG nova.virt.hardware [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1581.623714] env[62619]: DEBUG nova.virt.hardware [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1581.623864] env[62619]: DEBUG nova.virt.hardware [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1581.624113] env[62619]: DEBUG nova.virt.hardware [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1581.624272] env[62619]: DEBUG nova.virt.hardware [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1581.624416] env[62619]: DEBUG nova.virt.hardware [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1581.624618] env[62619]: DEBUG nova.virt.hardware [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 
tempest-ServerActionsTestOtherA-2061775145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1581.624772] env[62619]: DEBUG nova.virt.hardware [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1581.624935] env[62619]: DEBUG nova.virt.hardware [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1581.625125] env[62619]: DEBUG nova.virt.hardware [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1581.625299] env[62619]: DEBUG nova.virt.hardware [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1581.626198] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e6b240-8b53-4444-890c-2baa4257d8bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.635846] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e40f46c-ed8d-4600-9207-e8e4c9afc1f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.647456] env[62619]: DEBUG nova.network.neutron [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Successfully created port: fac1bf3e-4c83-47cb-9a34-2a16035eb800 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1581.661641] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c9d0fa-1a03-0f27-cfde-67e509490edb, 'name': SearchDatastore_Task, 'duration_secs': 0.012474} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.662151] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:78:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8ee8640-3787-4c27-9581-962ddb2be7e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61e78327-dbb0-497b-be4b-14a0a1d8a5de', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1581.669598] env[62619]: DEBUG oslo.service.loopingcall [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1581.671193] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.671425] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1581.671719] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.672306] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.672306] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1581.672444] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1581.675204] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-4e131970-ca55-4d41-9a8b-12f15dd2ff6c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.677562] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba08e98b-73f6-451b-96bc-4abc89be443c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.692201] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "cbff225f-2d11-4a43-a320-95dd3afb8e48" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.692438] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "cbff225f-2d11-4a43-a320-95dd3afb8e48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1581.698600] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1581.698600] env[62619]: value = "task-1777787" [ 1581.698600] env[62619]: _type = "Task" [ 1581.698600] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.702522] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1581.702692] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1581.706295] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7f5add1-f514-41b2-aa4f-a288e324f115 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.711828] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777787, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.716123] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1581.716123] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a5637b-476b-4329-41c2-73212c3285ca" [ 1581.716123] env[62619]: _type = "Task" [ 1581.716123] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.717454] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777784, 'name': ReconfigVM_Task, 'duration_secs': 0.52564} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.720272] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 6cd2f6e6-79a4-41be-a349-b504028ecab4/6cd2f6e6-79a4-41be-a349-b504028ecab4.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1581.722061] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d7a80256-bdcf-4d57-8407-db1615a701c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.728613] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a5637b-476b-4329-41c2-73212c3285ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.729561] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1581.729561] env[62619]: value = "task-1777788" [ 1581.729561] env[62619]: _type = "Task" [ 1581.729561] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.731633] env[62619]: DEBUG nova.compute.manager [req-19332b37-fc5d-4cf1-953b-bdc11c95266e req-e6240cb8-75f0-4776-a15d-68406323af03 service nova] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Received event network-vif-plugged-cab4b941-1153-4182-984e-3286233288af {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1581.731848] env[62619]: DEBUG oslo_concurrency.lockutils [req-19332b37-fc5d-4cf1-953b-bdc11c95266e req-e6240cb8-75f0-4776-a15d-68406323af03 service nova] Acquiring lock "4b2e9965-cbd4-4d98-b003-436b4a8c913e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.732124] env[62619]: DEBUG oslo_concurrency.lockutils [req-19332b37-fc5d-4cf1-953b-bdc11c95266e req-e6240cb8-75f0-4776-a15d-68406323af03 service nova] Lock "4b2e9965-cbd4-4d98-b003-436b4a8c913e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1581.732399] env[62619]: DEBUG oslo_concurrency.lockutils [req-19332b37-fc5d-4cf1-953b-bdc11c95266e req-e6240cb8-75f0-4776-a15d-68406323af03 service nova] Lock "4b2e9965-cbd4-4d98-b003-436b4a8c913e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1581.732510] env[62619]: DEBUG nova.compute.manager [req-19332b37-fc5d-4cf1-953b-bdc11c95266e req-e6240cb8-75f0-4776-a15d-68406323af03 service nova] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] No waiting events found dispatching network-vif-plugged-cab4b941-1153-4182-984e-3286233288af {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1581.732726] env[62619]: WARNING nova.compute.manager [req-19332b37-fc5d-4cf1-953b-bdc11c95266e req-e6240cb8-75f0-4776-a15d-68406323af03 service nova] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Received unexpected event network-vif-plugged-cab4b941-1153-4182-984e-3286233288af for instance with vm_state building and task_state spawning. [ 1581.733044] env[62619]: DEBUG nova.compute.manager [req-19332b37-fc5d-4cf1-953b-bdc11c95266e req-e6240cb8-75f0-4776-a15d-68406323af03 service nova] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Received event network-changed-cab4b941-1153-4182-984e-3286233288af {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1581.733202] env[62619]: DEBUG nova.compute.manager [req-19332b37-fc5d-4cf1-953b-bdc11c95266e req-e6240cb8-75f0-4776-a15d-68406323af03 service nova] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Refreshing instance network info cache due to event network-changed-cab4b941-1153-4182-984e-3286233288af. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1581.733334] env[62619]: DEBUG oslo_concurrency.lockutils [req-19332b37-fc5d-4cf1-953b-bdc11c95266e req-e6240cb8-75f0-4776-a15d-68406323af03 service nova] Acquiring lock "refresh_cache-4b2e9965-cbd4-4d98-b003-436b4a8c913e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.742760] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777788, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.847960] env[62619]: DEBUG oslo_vmware.api [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777786, 'name': PowerOffVM_Task, 'duration_secs': 0.210047} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.848552] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1581.848728] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1581.849025] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f40dd9c-6baf-4a69-a714-06f0377769fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.983441] env[62619]: DEBUG oslo_vmware.api [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777785, 'name': RemoveSnapshot_Task, 'duration_secs': 0.900858} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.983838] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1581.984086] env[62619]: INFO nova.compute.manager [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Took 17.57 seconds to snapshot the instance on the hypervisor. 
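A recurring pattern in the records above is the task wait loop: the driver logs "Waiting for the task: (returnval){ value = "task-…" }" at wait_for_task (api.py:397), then repeatedly polls at _poll_task (api.py:434) logging "progress is N%", and finally records completion with a duration_secs at api.py:444 (e.g. the RemoveSnapshot_Task that finished just above after 0.900858s). The following is a minimal, hypothetical sketch of that poll-until-complete pattern; the FakeTask class and function names are invented for illustration and are not the oslo.vmware implementation (which, judging by the loopingcall.py entries elsewhere in this log, appears to drive its waits via oslo.service looping calls).

    # Illustrative sketch only: mimics the "progress is N%" / "completed
    # successfully" sequence seen in the surrounding log lines.
    import time


    class FakeTask:
        """Stands in for a vCenter task handle such as 'task-1777786'."""

        def __init__(self, task_id, progress_steps):
            self.task_id = task_id
            self._steps = iter(progress_steps)

        def poll(self):
            """Return (state, progress); 'success' once all steps are consumed."""
            try:
                return "running", next(self._steps)
            except StopIteration:
                return "success", 100


    def wait_for_task(task, interval=0.5, timeout=30.0):
        """Poll a task at a fixed interval until it reports success."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = task.poll()
            print(f"Task: {task.task_id} progress is {progress}%.")
            if state == "success":
                print(f"Task: {task.task_id} completed successfully.")
                return
            time.sleep(interval)
        raise TimeoutError(f"Task {task.task_id} did not complete in {timeout}s")


    if __name__ == "__main__":
        # Produces output shaped like the _poll_task lines above.
        wait_for_task(FakeTask("task-1777786", [0, 25, 99]), interval=0.01)

Fixed-interval polling keeps the loop simple at the cost of a little latency between actual completion and its detection, which is why the logged duration_secs values are slightly larger than the underlying vCenter operation times.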
[ 1582.037307] env[62619]: DEBUG nova.compute.manager [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1582.059848] env[62619]: DEBUG nova.network.neutron [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1582.097208] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1582.097461] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1582.097643] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleting the datastore file [datastore1] ae37cae9-c82e-4775-8a8f-6bbf9108b0bd {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1582.097926] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-92599121-22f1-4123-9714-f675a555a12d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.113377] env[62619]: DEBUG oslo_vmware.api [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1582.113377] env[62619]: value = "task-1777790" [ 1582.113377] env[62619]: _type = "Task" [ 1582.113377] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.127233] env[62619]: DEBUG oslo_vmware.api [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777790, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.210397] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777787, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.227920] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a5637b-476b-4329-41c2-73212c3285ca, 'name': SearchDatastore_Task, 'duration_secs': 0.03367} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.229167] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-979794a7-f607-4ac1-accd-7c5d4048ec6b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.241065] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1582.241065] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5216dde3-67d8-fd8d-fa7a-8fd848d5d655" [ 1582.241065] env[62619]: _type = "Task" [ 1582.241065] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.244984] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777788, 'name': Rename_Task, 'duration_secs': 0.381932} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.250487] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1582.251874] env[62619]: DEBUG nova.network.neutron [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Updating instance_info_cache with network_info: [{"id": "cab4b941-1153-4182-984e-3286233288af", "address": "fa:16:3e:3d:52:06", "network": {"id": "5cd626b8-8154-4952-96dc-0dbeda683bf4", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-944646948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cefd8de3a0c84548b021299e27a3cab7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcab4b941-11", "ovs_interfaceid": "cab4b941-1153-4182-984e-3286233288af", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.253060] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13b78862-984f-44de-bd67-34601795c4fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.261015] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5216dde3-67d8-fd8d-fa7a-8fd848d5d655, 'name': SearchDatastore_Task, 'duration_secs': 0.012924} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.264316] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.264789] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3/c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1582.264881] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1582.264881] env[62619]: value = "task-1777791" [ 1582.264881] env[62619]: _type = "Task" [ 1582.264881] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.265847] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afba9fe9-4c6f-4d72-adc6-aa4f04c0ba1f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.277011] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777791, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.280472] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1582.280472] env[62619]: value = "task-1777792" [ 1582.280472] env[62619]: _type = "Task" [ 1582.280472] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.288873] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777792, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.537497] env[62619]: DEBUG nova.compute.manager [None req-92a06624-dd91-4897-8072-5204a0e13b15 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Found 2 images (rotation: 2) {{(pid=62619) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4924}} [ 1582.611329] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-394005fb-e396-4c08-8442-bf4f7593c1e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.627756] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9cd2eca-0da3-4952-9f6d-92aa3135c79f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.631450] env[62619]: DEBUG oslo_vmware.api [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777790, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.34778} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.631764] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1582.631984] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1582.632224] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1582.632426] env[62619]: INFO nova.compute.manager [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Took 1.31 seconds to destroy the instance on the hypervisor. [ 1582.632693] env[62619]: DEBUG oslo.service.loopingcall [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1582.633283] env[62619]: DEBUG nova.compute.manager [-] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1582.633423] env[62619]: DEBUG nova.network.neutron [-] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1582.663441] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ec48b0-7770-4231-b271-6a344b30bb1c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.672511] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71188e01-48f4-41c3-874a-156c93beba84 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.686956] env[62619]: DEBUG nova.compute.provider_tree [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1582.708743] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777787, 'name': CreateVM_Task, 'duration_secs': 0.711333} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.709578] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1582.710339] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1582.710506] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.710834] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1582.711106] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d91c5146-0291-4af9-a7b2-b7ac849b1731 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.715822] env[62619]: DEBUG oslo_vmware.api [None 
req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1582.715822] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5279d3a2-8010-6fc2-c787-b73ad33dca54" [ 1582.715822] env[62619]: _type = "Task" [ 1582.715822] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.723747] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5279d3a2-8010-6fc2-c787-b73ad33dca54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.757495] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Releasing lock "refresh_cache-4b2e9965-cbd4-4d98-b003-436b4a8c913e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.757495] env[62619]: DEBUG nova.compute.manager [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Instance network_info: |[{"id": "cab4b941-1153-4182-984e-3286233288af", "address": "fa:16:3e:3d:52:06", "network": {"id": "5cd626b8-8154-4952-96dc-0dbeda683bf4", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-944646948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cefd8de3a0c84548b021299e27a3cab7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcab4b941-11", "ovs_interfaceid": "cab4b941-1153-4182-984e-3286233288af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1582.757495] env[62619]: DEBUG oslo_concurrency.lockutils [req-19332b37-fc5d-4cf1-953b-bdc11c95266e req-e6240cb8-75f0-4776-a15d-68406323af03 service nova] Acquired lock "refresh_cache-4b2e9965-cbd4-4d98-b003-436b4a8c913e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.757495] env[62619]: DEBUG nova.network.neutron [req-19332b37-fc5d-4cf1-953b-bdc11c95266e req-e6240cb8-75f0-4776-a15d-68406323af03 service nova] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Refreshing network info cache for port cab4b941-1153-4182-984e-3286233288af {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1582.758834] 
env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:52:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a31c4b8-5b72-4f32-aab3-c4e963e684dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cab4b941-1153-4182-984e-3286233288af', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1582.766854] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Creating folder: Project (cefd8de3a0c84548b021299e27a3cab7). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1582.770979] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d59f252a-1859-48bc-b5c9-49208dc70841 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.784993] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777791, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.789574] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Created folder: Project (cefd8de3a0c84548b021299e27a3cab7) in parent group-v368875. [ 1582.789827] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Creating folder: Instances. Parent ref: group-v369050. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1582.790166] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb7051a5-a192-48cb-9839-0a2524b36767 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.797468] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777792, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.799782] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Created folder: Instances in parent group-v369050. [ 1582.800065] env[62619]: DEBUG oslo.service.loopingcall [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1582.800247] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1582.800463] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8cefa4be-606b-4568-841d-51c79f0690ed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.823234] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1582.823234] env[62619]: value = "task-1777795" [ 1582.823234] env[62619]: _type = "Task" [ 1582.823234] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.834609] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777795, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.026579] env[62619]: DEBUG nova.network.neutron [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Port b1ace9af-97b6-4d21-bbe4-972a2a1c1e13 binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1583.026997] env[62619]: DEBUG oslo_concurrency.lockutils [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1583.027138] env[62619]: DEBUG oslo_concurrency.lockutils [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.027353] env[62619]: DEBUG nova.network.neutron [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1583.046396] env[62619]: DEBUG nova.compute.manager [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1583.088239] env[62619]: DEBUG nova.virt.hardware [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:48:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='60861953',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1365947631',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1583.091293] env[62619]: DEBUG nova.virt.hardware [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1583.091588] env[62619]: DEBUG nova.virt.hardware [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1583.092362] env[62619]: DEBUG nova.virt.hardware [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1583.092362] env[62619]: DEBUG nova.virt.hardware [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1583.092512] env[62619]: DEBUG nova.virt.hardware [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1583.092878] env[62619]: DEBUG nova.virt.hardware [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1583.093165] env[62619]: DEBUG nova.virt.hardware [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 
tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1583.093447] env[62619]: DEBUG nova.virt.hardware [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1583.093763] env[62619]: DEBUG nova.virt.hardware [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1583.095111] env[62619]: DEBUG nova.virt.hardware [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1583.095869] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa20ff4-d8a0-4e8a-9f03-c832a90733b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.104398] env[62619]: DEBUG nova.compute.manager [req-b6aacb75-5e45-46f2-a77e-9d353d98c370 req-68fbd5b5-c560-4ef5-b1e2-43f917df4144 service nova] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Received event network-vif-deleted-52679af0-12c1-41ec-927a-590d8d45fce0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1583.104779] env[62619]: INFO nova.compute.manager [req-b6aacb75-5e45-46f2-a77e-9d353d98c370 req-68fbd5b5-c560-4ef5-b1e2-43f917df4144 service nova] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Neutron deleted interface 52679af0-12c1-41ec-927a-590d8d45fce0; detaching it from the instance and deleting it from the info cache [ 1583.104981] env[62619]: DEBUG nova.network.neutron [req-b6aacb75-5e45-46f2-a77e-9d353d98c370 req-68fbd5b5-c560-4ef5-b1e2-43f917df4144 service nova] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1583.112425] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe4d6cb-173d-4edd-ab15-025e46a49cf7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.190938] env[62619]: DEBUG nova.scheduler.client.report [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1583.227563] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5279d3a2-8010-6fc2-c787-b73ad33dca54, 'name': SearchDatastore_Task, 'duration_secs': 0.073324} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.227879] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.228135] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1583.228371] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1583.228516] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.228691] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1583.228989] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25dd710f-de46-485e-82cd-c4cb5700f926 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.238077] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1583.238291] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1583.239316] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-237b3ebd-6f9a-47ea-954e-2a9a637853c2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.245445] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1583.245445] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b58092-e1b3-1c6b-4478-26ed70275be6" [ 1583.245445] env[62619]: _type = "Task" [ 1583.245445] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.254628] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b58092-e1b3-1c6b-4478-26ed70275be6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.283526] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777791, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.293508] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777792, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.849889} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.293813] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3/c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1583.294070] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1583.294350] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95f7728d-6954-4742-adce-290eb2ceaa07 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.301451] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1583.301451] env[62619]: value = "task-1777796" [ 1583.301451] env[62619]: _type = "Task" [ 1583.301451] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.310885] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777796, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.334850] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777795, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.403611] env[62619]: DEBUG nova.network.neutron [-] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1583.408895] env[62619]: DEBUG nova.network.neutron [req-19332b37-fc5d-4cf1-953b-bdc11c95266e req-e6240cb8-75f0-4776-a15d-68406323af03 service nova] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Updated VIF entry in instance network info cache for port cab4b941-1153-4182-984e-3286233288af. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1583.409280] env[62619]: DEBUG nova.network.neutron [req-19332b37-fc5d-4cf1-953b-bdc11c95266e req-e6240cb8-75f0-4776-a15d-68406323af03 service nova] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Updating instance_info_cache with network_info: [{"id": "cab4b941-1153-4182-984e-3286233288af", "address": "fa:16:3e:3d:52:06", "network": {"id": "5cd626b8-8154-4952-96dc-0dbeda683bf4", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-944646948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cefd8de3a0c84548b021299e27a3cab7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcab4b941-11", "ovs_interfaceid": "cab4b941-1153-4182-984e-3286233288af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1583.607763] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f0790333-8c1d-4f02-bfb4-fa8db57c9981 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.620168] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d66a1ee-69df-4f5c-9dc8-54fab6ea3b1b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.656112] env[62619]: DEBUG nova.compute.manager [req-b6aacb75-5e45-46f2-a77e-9d353d98c370 req-68fbd5b5-c560-4ef5-b1e2-43f917df4144 service nova] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Detach interface failed, port_id=52679af0-12c1-41ec-927a-590d8d45fce0, reason: Instance ae37cae9-c82e-4775-8a8f-6bbf9108b0bd could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1583.697441] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.165s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.699720] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.902s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1583.701316] env[62619]: INFO nova.compute.claims [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1583.724907] env[62619]: INFO nova.scheduler.client.report [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Deleted allocations for instance fb231b38-950e-4c86-bfe5-4c10a304910f [ 1583.760726] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b58092-e1b3-1c6b-4478-26ed70275be6, 'name': SearchDatastore_Task, 'duration_secs': 0.019346} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.762139] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a846b75a-ab7e-4526-bc40-4848b9fb1f3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.768430] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1583.768430] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520124a4-d82a-bed9-833e-e989365b5727" [ 1583.768430] env[62619]: _type = "Task" [ 1583.768430] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.777031] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520124a4-d82a-bed9-833e-e989365b5727, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.785241] env[62619]: DEBUG oslo_vmware.api [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777791, 'name': PowerOnVM_Task, 'duration_secs': 1.177375} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.785404] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1583.785657] env[62619]: INFO nova.compute.manager [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Took 9.48 seconds to spawn the instance on the hypervisor. [ 1583.785771] env[62619]: DEBUG nova.compute.manager [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1583.786590] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c29f54c-292a-46f3-a1de-2a6ca83ee907 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.812958] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777796, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064761} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.814469] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1583.816495] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9b3877-1cf1-4db8-aa23-e2579eb4f06f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.846426] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3/c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1583.848770] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a06f8fa-40cf-4857-a856-b995176daca3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.866507] env[62619]: DEBUG nova.network.neutron [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance_info_cache with network_info: [{"id": "b1ace9af-97b6-4d21-bbe4-972a2a1c1e13", "address": "fa:16:3e:70:d7:d3", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.184", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1ace9af-97", "ovs_interfaceid": "b1ace9af-97b6-4d21-bbe4-972a2a1c1e13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1583.871288] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777795, 'name': CreateVM_Task, 'duration_secs': 0.776131} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.872563] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1583.873142] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1583.873142] env[62619]: value = "task-1777797" [ 1583.873142] env[62619]: _type = "Task" [ 1583.873142] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.873727] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1583.873873] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.874271] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1583.874555] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d96742bd-a489-4a01-8ccd-7449e0aef5a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.884800] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Waiting for the task: (returnval){ [ 1583.884800] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523fe97f-f2f7-3d0c-6ed4-036ff2a1756c" [ 1583.884800] env[62619]: _type = "Task" [ 1583.884800] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.888645] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777797, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.897667] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523fe97f-f2f7-3d0c-6ed4-036ff2a1756c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.906595] env[62619]: INFO nova.compute.manager [-] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Took 1.27 seconds to deallocate network for instance. [ 1583.912066] env[62619]: DEBUG oslo_concurrency.lockutils [req-19332b37-fc5d-4cf1-953b-bdc11c95266e req-e6240cb8-75f0-4776-a15d-68406323af03 service nova] Releasing lock "refresh_cache-4b2e9965-cbd4-4d98-b003-436b4a8c913e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1584.237157] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90ce9a50-ce9f-4e80-a969-25bf3383ad80 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "fb231b38-950e-4c86-bfe5-4c10a304910f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.454s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.264790] env[62619]: DEBUG nova.network.neutron [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Successfully updated port: fac1bf3e-4c83-47cb-9a34-2a16035eb800 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1584.282346] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520124a4-d82a-bed9-833e-e989365b5727, 'name': SearchDatastore_Task, 'duration_secs': 0.02105} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.282588] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1584.286022] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 5b1008fb-7c0a-4e12-90f8-119a82ea62f1/5b1008fb-7c0a-4e12-90f8-119a82ea62f1.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1584.286022] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b306d35-e97e-4202-8348-8c4fc9b087d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.290506] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1584.290506] env[62619]: value = "task-1777798" [ 1584.290506] env[62619]: _type = "Task" [ 1584.290506] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.302401] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777798, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.304730] env[62619]: INFO nova.compute.manager [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Took 29.88 seconds to build instance. 
[ 1584.342021] env[62619]: DEBUG nova.compute.manager [req-668ea454-b812-4616-8fa7-4a4b61980eaf req-8a27280f-46de-41c0-81cc-743aeb368edb service nova] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Received event network-vif-plugged-fac1bf3e-4c83-47cb-9a34-2a16035eb800 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1584.342021] env[62619]: DEBUG oslo_concurrency.lockutils [req-668ea454-b812-4616-8fa7-4a4b61980eaf req-8a27280f-46de-41c0-81cc-743aeb368edb service nova] Acquiring lock "a6ba8114-0261-4894-98c0-9e0360f6d256-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.342021] env[62619]: DEBUG oslo_concurrency.lockutils [req-668ea454-b812-4616-8fa7-4a4b61980eaf req-8a27280f-46de-41c0-81cc-743aeb368edb service nova] Lock "a6ba8114-0261-4894-98c0-9e0360f6d256-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.342021] env[62619]: DEBUG oslo_concurrency.lockutils [req-668ea454-b812-4616-8fa7-4a4b61980eaf req-8a27280f-46de-41c0-81cc-743aeb368edb service nova] Lock "a6ba8114-0261-4894-98c0-9e0360f6d256-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.342021] env[62619]: DEBUG nova.compute.manager [req-668ea454-b812-4616-8fa7-4a4b61980eaf req-8a27280f-46de-41c0-81cc-743aeb368edb service nova] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] No waiting events found dispatching network-vif-plugged-fac1bf3e-4c83-47cb-9a34-2a16035eb800 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1584.342021] env[62619]: WARNING nova.compute.manager [req-668ea454-b812-4616-8fa7-4a4b61980eaf req-8a27280f-46de-41c0-81cc-743aeb368edb service nova] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Received unexpected event network-vif-plugged-fac1bf3e-4c83-47cb-9a34-2a16035eb800 for instance with vm_state building and task_state spawning. [ 1584.374035] env[62619]: DEBUG oslo_concurrency.lockutils [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Releasing lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1584.386285] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777797, 'name': ReconfigVM_Task, 'duration_secs': 0.372466} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.386734] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Reconfigured VM instance instance-0000003d to attach disk [datastore1] c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3/c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1584.387516] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0daae72f-09d3-470b-8862-e8e3b2b1a092 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.398255] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523fe97f-f2f7-3d0c-6ed4-036ff2a1756c, 'name': SearchDatastore_Task, 'duration_secs': 0.033369} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.400651] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1584.400651] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1584.400651] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.400651] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.400651] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1584.401035] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 
tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1584.401035] env[62619]: value = "task-1777799" [ 1584.401035] env[62619]: _type = "Task" [ 1584.401035] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.401404] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c87ff0ea-39fd-4c6e-a311-a4ea4be53cac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.412571] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777799, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.413615] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.415116] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1584.415427] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1584.416571] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0536283-4545-4627-a959-a20f2cedbaaf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.421561] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Waiting for the task: (returnval){ [ 1584.421561] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527204c0-476c-4d8c-ae91-b8620aaeb41d" [ 1584.421561] env[62619]: _type = "Task" [ 1584.421561] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.430746] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527204c0-476c-4d8c-ae91-b8620aaeb41d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.509942] env[62619]: DEBUG nova.compute.manager [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1584.511273] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db90311-137c-4269-b13f-94fc35e9b107 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.767695] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "refresh_cache-a6ba8114-0261-4894-98c0-9e0360f6d256" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.767847] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquired lock "refresh_cache-a6ba8114-0261-4894-98c0-9e0360f6d256" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.767960] env[62619]: DEBUG nova.network.neutron [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1584.804810] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777798, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.806386] env[62619]: DEBUG oslo_concurrency.lockutils [None req-027b32ec-4aba-4a5e-9d14-00f5281dbb9f tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "6cd2f6e6-79a4-41be-a349-b504028ecab4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.392s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.878613] env[62619]: DEBUG nova.compute.manager [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62619) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1584.879457] env[62619]: DEBUG oslo_concurrency.lockutils [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.914431] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777799, 'name': Rename_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.933594] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527204c0-476c-4d8c-ae91-b8620aaeb41d, 'name': SearchDatastore_Task, 'duration_secs': 0.013126} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.937453] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e674ebda-4b60-464e-a272-7200b65799a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.944714] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Waiting for the task: (returnval){ [ 1584.944714] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a7049d-5501-c60c-e9b8-740bc91071ab" [ 1584.944714] env[62619]: _type = "Task" [ 1584.944714] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.961265] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a7049d-5501-c60c-e9b8-740bc91071ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.021445] env[62619]: INFO nova.compute.manager [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] instance snapshotting [ 1585.022147] env[62619]: DEBUG nova.objects.instance [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'flavor' on Instance uuid da806d3f-79f0-4188-a2d8-0beeb9dfec1a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1585.197320] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d081b7e-0fb8-4946-842a-fe220ce648c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.205340] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ab9730-9b13-485e-b87c-3876e18da82f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.238166] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e663c0bb-0250-4f4f-bf88-792b3bda9426 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.246243] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939a34c1-26c0-4178-beaa-d941664926d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.259578] env[62619]: DEBUG nova.compute.provider_tree [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1585.301221] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777798, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.921618} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.301352] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 5b1008fb-7c0a-4e12-90f8-119a82ea62f1/5b1008fb-7c0a-4e12-90f8-119a82ea62f1.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1585.301570] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1585.301813] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70af28fa-1470-4050-b346-c6711b3f30dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.308095] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1585.308095] env[62619]: value = "task-1777800" [ 1585.308095] env[62619]: _type = "Task" [ 1585.308095] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.311397] env[62619]: DEBUG nova.compute.manager [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1585.319694] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777800, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.329092] env[62619]: DEBUG nova.network.neutron [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1585.415529] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777799, 'name': Rename_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.454648] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a7049d-5501-c60c-e9b8-740bc91071ab, 'name': SearchDatastore_Task, 'duration_secs': 0.065287} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.454985] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.455398] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4b2e9965-cbd4-4d98-b003-436b4a8c913e/4b2e9965-cbd4-4d98-b003-436b4a8c913e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1585.455666] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-846bc02b-55f8-4109-a177-0a480b0d933c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.462748] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Waiting for the task: (returnval){ [ 1585.462748] env[62619]: value = "task-1777801" [ 1585.462748] env[62619]: _type = "Task" [ 1585.462748] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.473638] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777801, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.528397] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbef6e72-a44f-4726-9a39-9d42f7cfa75e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.548705] env[62619]: DEBUG nova.network.neutron [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Updating instance_info_cache with network_info: [{"id": "fac1bf3e-4c83-47cb-9a34-2a16035eb800", "address": "fa:16:3e:b5:f2:52", "network": {"id": "712c7a9c-8039-44f9-91d1-27991eef432a", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-366397882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7dea13f34f140dd98291849f66720ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfac1bf3e-4c", "ovs_interfaceid": "fac1bf3e-4c83-47cb-9a34-2a16035eb800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.553018] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde88905-2c16-4d66-890a-7822b945e3e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.763360] env[62619]: DEBUG nova.scheduler.client.report [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1585.819482] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777800, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.191818} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.821529] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1585.822670] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595fe709-fe85-48b5-b211-5707cc94cde1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.844390] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 5b1008fb-7c0a-4e12-90f8-119a82ea62f1/5b1008fb-7c0a-4e12-90f8-119a82ea62f1.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1585.847871] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.847871] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9dec321-f723-40a8-a74f-6f2aa0874a23 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.865014] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1585.865014] env[62619]: value = "task-1777802" [ 1585.865014] env[62619]: _type = "Task" [ 1585.865014] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.872347] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777802, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.912361] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777799, 'name': Rename_Task, 'duration_secs': 1.153346} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.912606] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1585.912831] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7aba147a-322d-436d-969a-d08b16d2b8cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.917986] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1585.917986] env[62619]: value = "task-1777803" [ 1585.917986] env[62619]: _type = "Task" [ 1585.917986] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.932820] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777803, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.972476] env[62619]: DEBUG nova.compute.manager [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1585.972782] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777801, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.973536] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44607033-e94b-4784-bf3c-37e57386ad93 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.055075] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Releasing lock "refresh_cache-a6ba8114-0261-4894-98c0-9e0360f6d256" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1586.055075] env[62619]: DEBUG nova.compute.manager [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Instance network_info: |[{"id": "fac1bf3e-4c83-47cb-9a34-2a16035eb800", "address": "fa:16:3e:b5:f2:52", "network": {"id": "712c7a9c-8039-44f9-91d1-27991eef432a", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-366397882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7dea13f34f140dd98291849f66720ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfac1bf3e-4c", "ovs_interfaceid": "fac1bf3e-4c83-47cb-9a34-2a16035eb800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1586.055690] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:f2:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '02092ea4-bae0-4e42-b0ab-abc365b4395a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fac1bf3e-4c83-47cb-9a34-2a16035eb800', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1586.063224] env[62619]: DEBUG oslo.service.loopingcall [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1586.064240] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1586.064504] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1586.064738] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5d812e5a-37e7-4800-898d-3b11d8145e44 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.067658] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9caf0bc8-1da6-4084-b5a1-a4f572882882 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.090619] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1586.090619] env[62619]: value = "task-1777805" [ 1586.090619] env[62619]: _type = "Task" [ 1586.090619] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.094973] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1586.094973] env[62619]: value = "task-1777804" [ 1586.094973] env[62619]: _type = "Task" [ 1586.094973] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.100911] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777805, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.105789] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777804, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.268787] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.569s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.269453] env[62619]: DEBUG nova.compute.manager [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1586.272475] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.444s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.274316] env[62619]: INFO nova.compute.claims [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1586.375995] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777802, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.419697] env[62619]: DEBUG nova.compute.manager [req-9a0bf06e-8d26-4d35-97d7-16827cead76a req-a09e486a-83f3-4e77-adb5-c1993de04a77 service nova] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Received event network-changed-fac1bf3e-4c83-47cb-9a34-2a16035eb800 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1586.419955] env[62619]: DEBUG nova.compute.manager [req-9a0bf06e-8d26-4d35-97d7-16827cead76a req-a09e486a-83f3-4e77-adb5-c1993de04a77 service nova] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Refreshing instance network info cache due to event network-changed-fac1bf3e-4c83-47cb-9a34-2a16035eb800. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1586.420185] env[62619]: DEBUG oslo_concurrency.lockutils [req-9a0bf06e-8d26-4d35-97d7-16827cead76a req-a09e486a-83f3-4e77-adb5-c1993de04a77 service nova] Acquiring lock "refresh_cache-a6ba8114-0261-4894-98c0-9e0360f6d256" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.420323] env[62619]: DEBUG oslo_concurrency.lockutils [req-9a0bf06e-8d26-4d35-97d7-16827cead76a req-a09e486a-83f3-4e77-adb5-c1993de04a77 service nova] Acquired lock "refresh_cache-a6ba8114-0261-4894-98c0-9e0360f6d256" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.420493] env[62619]: DEBUG nova.network.neutron [req-9a0bf06e-8d26-4d35-97d7-16827cead76a req-a09e486a-83f3-4e77-adb5-c1993de04a77 service nova] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Refreshing network info cache for port fac1bf3e-4c83-47cb-9a34-2a16035eb800 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1586.442342] env[62619]: DEBUG oslo_vmware.api [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777803, 'name': PowerOnVM_Task, 'duration_secs': 0.463093} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.442613] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1586.442806] env[62619]: INFO nova.compute.manager [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Took 9.29 seconds to spawn the instance on the hypervisor. [ 1586.442980] env[62619]: DEBUG nova.compute.manager [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1586.443780] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6801133f-aded-4880-a340-38ec18f14b02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.474117] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777801, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.484354] env[62619]: INFO nova.compute.manager [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] instance snapshotting [ 1586.487649] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6f589d-9f82-4ece-abf0-1667bf24a389 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.510252] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fdb1fbe-7413-4875-8238-d75f2c2237e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.601871] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777805, 'name': CreateVM_Task, 'duration_secs': 0.509187} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.602405] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1586.603114] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.603284] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.603599] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1586.603860] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b329123-6cf9-489a-87ae-dbbdf591ca98 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.608164] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777804, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.611121] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1586.611121] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528f5413-c546-e507-9820-c23ee9c8aa97" [ 1586.611121] env[62619]: _type = "Task" [ 1586.611121] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.618540] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528f5413-c546-e507-9820-c23ee9c8aa97, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.780988] env[62619]: DEBUG nova.compute.utils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1586.782766] env[62619]: DEBUG nova.compute.manager [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1586.782766] env[62619]: DEBUG nova.network.neutron [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1586.855763] env[62619]: DEBUG nova.policy [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0cde3ba9ee004055bb5e09bc932dc4f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0948c27a2b08413ba82d553452965c9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1586.877444] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777802, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.976869] env[62619]: INFO nova.compute.manager [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Took 32.04 seconds to build instance. [ 1586.986277] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777801, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.022770] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1587.023159] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-344d5325-8406-40e5-b726-5e7bd675586f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.033354] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1587.033354] env[62619]: value = "task-1777806" [ 1587.033354] env[62619]: _type = "Task" [ 1587.033354] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.042232] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777806, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.105990] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777804, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.121387] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528f5413-c546-e507-9820-c23ee9c8aa97, 'name': SearchDatastore_Task, 'duration_secs': 0.01679} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.121681] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1587.121953] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1587.122153] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1587.122300] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1587.122472] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1587.122730] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-307b211f-a8f8-4671-9e59-1fd0a5c70496 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.130760] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1587.130987] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1587.134012] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f968cb2-2aa6-4102-9251-da4411b9d5ad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.139427] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1587.139427] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5249fac2-1061-f101-e6f1-b6735fe53134" [ 1587.139427] env[62619]: _type = "Task" [ 1587.139427] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.148544] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5249fac2-1061-f101-e6f1-b6735fe53134, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.291622] env[62619]: DEBUG nova.compute.manager [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1587.378105] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777802, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.479629] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777801, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.675897} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.480103] env[62619]: DEBUG oslo_concurrency.lockutils [None req-030c7a54-032b-400f-82da-a4bdedbf82c0 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.548s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.480841] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4b2e9965-cbd4-4d98-b003-436b4a8c913e/4b2e9965-cbd4-4d98-b003-436b4a8c913e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1587.480841] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1587.481062] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea3f2822-70b2-4758-827e-164bd71b6070 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.485768] env[62619]: DEBUG nova.network.neutron [req-9a0bf06e-8d26-4d35-97d7-16827cead76a req-a09e486a-83f3-4e77-adb5-c1993de04a77 service nova] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Updated VIF entry in instance network info cache for port fac1bf3e-4c83-47cb-9a34-2a16035eb800. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1587.486256] env[62619]: DEBUG nova.network.neutron [req-9a0bf06e-8d26-4d35-97d7-16827cead76a req-a09e486a-83f3-4e77-adb5-c1993de04a77 service nova] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Updating instance_info_cache with network_info: [{"id": "fac1bf3e-4c83-47cb-9a34-2a16035eb800", "address": "fa:16:3e:b5:f2:52", "network": {"id": "712c7a9c-8039-44f9-91d1-27991eef432a", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-366397882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7dea13f34f140dd98291849f66720ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfac1bf3e-4c", "ovs_interfaceid": "fac1bf3e-4c83-47cb-9a34-2a16035eb800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1587.494159] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Waiting for the task: (returnval){ [ 1587.494159] env[62619]: value = "task-1777807" [ 1587.494159] env[62619]: _type = "Task" [ 1587.494159] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.506800] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777807, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.548209] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777806, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.591600] env[62619]: DEBUG nova.network.neutron [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Successfully created port: 2e09e8e9-5dd5-425b-9bed-ed1f14f42a12 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1587.611876] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777804, 'name': CreateSnapshot_Task, 'duration_secs': 1.031324} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.612204] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1587.612984] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b2749c-5a08-429e-89bf-9d9fbc7d4cee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.651429] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5249fac2-1061-f101-e6f1-b6735fe53134, 'name': SearchDatastore_Task, 'duration_secs': 0.022766} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.652296] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f48a8af-9afe-41db-a331-0bd8bc5286c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.658176] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1587.658176] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52264f51-b9d3-e2e5-3966-a94e36168758" [ 1587.658176] env[62619]: _type = "Task" [ 1587.658176] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.669038] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52264f51-b9d3-e2e5-3966-a94e36168758, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.845982] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8844c2e-8b98-4d8b-8ea3-3c3ead617f52 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.857673] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d104cc9-c6ed-4ac9-bb0e-0dd93bf65d6a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.901013] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a975dd8-7686-4ab7-97b5-4aff28fa99a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.909075] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777802, 'name': ReconfigVM_Task, 'duration_secs': 1.70545} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.911164] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 5b1008fb-7c0a-4e12-90f8-119a82ea62f1/5b1008fb-7c0a-4e12-90f8-119a82ea62f1.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1587.911788] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-730ed70a-4f84-4718-a4db-2b8f4d48f3d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.914459] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c2d266-cb32-4f6c-bea3-f65544adfe9d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.929260] env[62619]: DEBUG nova.compute.provider_tree [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1587.933180] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1587.933180] env[62619]: value = "task-1777808" [ 1587.933180] env[62619]: _type = "Task" [ 1587.933180] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.941461] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777808, 'name': Rename_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.989981] env[62619]: DEBUG oslo_concurrency.lockutils [req-9a0bf06e-8d26-4d35-97d7-16827cead76a req-a09e486a-83f3-4e77-adb5-c1993de04a77 service nova] Releasing lock "refresh_cache-a6ba8114-0261-4894-98c0-9e0360f6d256" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.003199] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777807, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089399} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.003466] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1588.004367] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e5337c-3c2f-49af-abfd-19d96c2ec607 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.028377] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 4b2e9965-cbd4-4d98-b003-436b4a8c913e/4b2e9965-cbd4-4d98-b003-436b4a8c913e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1588.028661] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c9fd76c-229c-4059-bf71-21de7b9bcfe4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.053192] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777806, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.054720] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Waiting for the task: (returnval){ [ 1588.054720] env[62619]: value = "task-1777809" [ 1588.054720] env[62619]: _type = "Task" [ 1588.054720] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.064945] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777809, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.133150] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1588.133150] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ffefc4cc-9a4f-4dd3-918a-03275e8e6d78 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.141604] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1588.141604] env[62619]: value = "task-1777810" [ 1588.141604] env[62619]: _type = "Task" [ 1588.141604] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.150643] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777810, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.169706] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52264f51-b9d3-e2e5-3966-a94e36168758, 'name': SearchDatastore_Task, 'duration_secs': 0.019983} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.170092] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.170257] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a6ba8114-0261-4894-98c0-9e0360f6d256/a6ba8114-0261-4894-98c0-9e0360f6d256.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1588.170547] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28dc589e-1621-4493-96f7-94438683eb40 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.177211] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1588.177211] env[62619]: value = "task-1777811" [ 1588.177211] env[62619]: _type = "Task" [ 1588.177211] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.185519] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777811, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.304026] env[62619]: DEBUG nova.compute.manager [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1588.335544] env[62619]: DEBUG nova.virt.hardware [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='bb40f2aa83c79cc4abcaaa5741f026d7',container_format='bare',created_at=2024-12-11T22:51:57Z,direct_url=,disk_format='vmdk',id=88432144-700d-4829-a1f6-4d35530dfc87,min_disk=1,min_ram=0,name='tempest-test-snap-2019842734',owner='0948c27a2b08413ba82d553452965c9b',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-12-11T22:52:14Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1588.335825] env[62619]: DEBUG nova.virt.hardware [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1588.335985] env[62619]: DEBUG nova.virt.hardware [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1588.336355] env[62619]: DEBUG nova.virt.hardware [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1588.337032] env[62619]: DEBUG nova.virt.hardware [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1588.337032] env[62619]: DEBUG nova.virt.hardware [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1588.337032] env[62619]: DEBUG nova.virt.hardware [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1588.337317] env[62619]: DEBUG nova.virt.hardware [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1588.337403] env[62619]: DEBUG nova.virt.hardware [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Got 1 possible topologies {{(pid=62619) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1588.337540] env[62619]: DEBUG nova.virt.hardware [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1588.337821] env[62619]: DEBUG nova.virt.hardware [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1588.338678] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30318d11-4100-4589-bf1e-d693a3952fd2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.349261] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b67e27-83e8-46c5-bd33-c96357b522b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.433113] env[62619]: DEBUG nova.scheduler.client.report [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1588.446950] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777808, 'name': Rename_Task, 'duration_secs': 0.28259} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.447302] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1588.447605] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f0ebba2f-a491-41bc-b322-ab6fd49836db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.454871] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1588.454871] env[62619]: value = "task-1777812" [ 1588.454871] env[62619]: _type = "Task" [ 1588.454871] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.464567] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777812, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.556760] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777806, 'name': CreateSnapshot_Task, 'duration_secs': 1.349503} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.561076] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1588.561814] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4ced12-4676-4708-a146-572f304dba36 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.574037] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777809, 'name': ReconfigVM_Task, 'duration_secs': 0.294197} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.576896] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 4b2e9965-cbd4-4d98-b003-436b4a8c913e/4b2e9965-cbd4-4d98-b003-436b4a8c913e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1588.577648] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6782415c-7ba2-4fe3-95fe-f94e9ec09bb6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.585081] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Waiting for the task: (returnval){ [ 1588.585081] env[62619]: value = "task-1777813" [ 1588.585081] env[62619]: _type = "Task" [ 1588.585081] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.595940] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777813, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.654935] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777810, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.689139] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777811, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.942355] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.669s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.942880] env[62619]: DEBUG nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1588.945873] env[62619]: DEBUG oslo_concurrency.lockutils [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.769s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.946149] env[62619]: DEBUG nova.objects.instance [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lazy-loading 'resources' on Instance uuid 7217d898-54ee-46ed-88fa-959c38e988e7 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1588.967859] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777812, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.084976] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1589.085382] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2288d64f-ae4e-48dd-90fe-bee2ab751247 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.096835] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777813, 'name': Rename_Task, 'duration_secs': 0.273426} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.098127] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1589.098447] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1589.098447] env[62619]: value = "task-1777814" [ 1589.098447] env[62619]: _type = "Task" [ 1589.098447] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.098627] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bba43673-4c56-45a1-b397-f343361bd2a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.107902] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777814, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.109100] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Waiting for the task: (returnval){ [ 1589.109100] env[62619]: value = "task-1777815" [ 1589.109100] env[62619]: _type = "Task" [ 1589.109100] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.119698] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777815, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.152090] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777810, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.188709] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777811, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.699829} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.189042] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a6ba8114-0261-4894-98c0-9e0360f6d256/a6ba8114-0261-4894-98c0-9e0360f6d256.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1589.189253] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1589.189548] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0fd8672-a647-4256-a45c-0800d329e909 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.206591] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1589.206591] env[62619]: value = "task-1777816" [ 1589.206591] env[62619]: _type = "Task" [ 1589.206591] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.215652] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777816, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.439509] env[62619]: DEBUG nova.compute.manager [req-f2cca1b2-1c66-4621-a861-74b99ba989a9 req-4ff2aa56-dd76-4457-b9b5-e2ea522e1918 service nova] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Received event network-vif-plugged-2e09e8e9-5dd5-425b-9bed-ed1f14f42a12 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1589.439768] env[62619]: DEBUG oslo_concurrency.lockutils [req-f2cca1b2-1c66-4621-a861-74b99ba989a9 req-4ff2aa56-dd76-4457-b9b5-e2ea522e1918 service nova] Acquiring lock "ec56c824-5f9a-47bf-bcd6-e456ddaad2f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.439857] env[62619]: DEBUG oslo_concurrency.lockutils [req-f2cca1b2-1c66-4621-a861-74b99ba989a9 req-4ff2aa56-dd76-4457-b9b5-e2ea522e1918 service nova] Lock "ec56c824-5f9a-47bf-bcd6-e456ddaad2f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.440053] env[62619]: DEBUG oslo_concurrency.lockutils [req-f2cca1b2-1c66-4621-a861-74b99ba989a9 req-4ff2aa56-dd76-4457-b9b5-e2ea522e1918 service nova] Lock "ec56c824-5f9a-47bf-bcd6-e456ddaad2f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.440280] env[62619]: DEBUG nova.compute.manager [req-f2cca1b2-1c66-4621-a861-74b99ba989a9 req-4ff2aa56-dd76-4457-b9b5-e2ea522e1918 service nova] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] No waiting events found dispatching network-vif-plugged-2e09e8e9-5dd5-425b-9bed-ed1f14f42a12 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1589.440472] env[62619]: WARNING nova.compute.manager [req-f2cca1b2-1c66-4621-a861-74b99ba989a9 req-4ff2aa56-dd76-4457-b9b5-e2ea522e1918 service nova] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Received unexpected event network-vif-plugged-2e09e8e9-5dd5-425b-9bed-ed1f14f42a12 for instance with vm_state building and task_state spawning. [ 1589.448954] env[62619]: DEBUG nova.compute.utils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1589.453545] env[62619]: DEBUG nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1589.453717] env[62619]: DEBUG nova.network.neutron [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1589.466990] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777812, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.569324] env[62619]: DEBUG nova.policy [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1cf40e9c8cc34d578bb40763740eb5cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbc6315c41fe451a94e28d68cab87b2e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1589.616533] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777814, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.624987] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777815, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.654030] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777810, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.720909] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777816, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086895} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.721973] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1589.722909] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bd9c14-2002-4afc-b4d5-0d55e3de50bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.757585] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] a6ba8114-0261-4894-98c0-9e0360f6d256/a6ba8114-0261-4894-98c0-9e0360f6d256.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1589.758736] env[62619]: DEBUG nova.network.neutron [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Successfully updated port: 2e09e8e9-5dd5-425b-9bed-ed1f14f42a12 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1589.762659] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-338f3799-cd62-4acd-99e2-55f3616b7853 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.779444] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "refresh_cache-ec56c824-5f9a-47bf-bcd6-e456ddaad2f2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.779669] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired lock "refresh_cache-ec56c824-5f9a-47bf-bcd6-e456ddaad2f2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.779706] env[62619]: DEBUG nova.network.neutron [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1589.787529] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1589.787529] env[62619]: value = "task-1777817" [ 1589.787529] env[62619]: _type = "Task" [ 1589.787529] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.801380] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777817, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.956200] env[62619]: DEBUG nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1589.968821] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777812, 'name': PowerOnVM_Task, 'duration_secs': 1.240935} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.969183] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1589.969412] env[62619]: DEBUG nova.compute.manager [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1589.970704] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ddb1d26-4f79-4c5b-839a-9f18659cda07 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.027181] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c06a6a-5384-4ecd-86bf-25aadddaeef8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.034932] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd76046d-7e8b-4fe9-93c8-281b8f84f2af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.067188] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39fd1e61-d254-4e55-8807-9ee7e563f267 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.075238] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d83b60-3d70-49aa-b891-56d86f769587 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.091360] env[62619]: DEBUG nova.compute.provider_tree [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Inventory has not 
changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1590.112240] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777814, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.120494] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777815, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.154508] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777810, 'name': CloneVM_Task} progress is 95%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.197881] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.198276] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.198440] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.198627] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.198793] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.201010] env[62619]: INFO nova.compute.manager [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Terminating instance [ 1590.297574] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777817, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.344776] env[62619]: DEBUG nova.network.neutron [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1590.426182] env[62619]: DEBUG nova.network.neutron [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Successfully created port: ac8f6def-d1ab-4e64-a359-5a90c2d13c1c {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1590.491025] env[62619]: INFO nova.compute.manager [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] bringing vm to original state: 'stopped' [ 1590.594793] env[62619]: DEBUG nova.scheduler.client.report [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1590.613024] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777814, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.623763] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777815, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.657394] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777810, 'name': CloneVM_Task, 'duration_secs': 2.143152} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.657652] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Created linked-clone VM from snapshot [ 1590.658433] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28b423e-e829-4731-844f-a03fe3b383e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.668402] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Uploading image b6c3d7df-6de8-4488-be06-95164f241d93 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1590.703048] env[62619]: DEBUG oslo_vmware.rw_handles [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1590.703048] env[62619]: value = "vm-369055" [ 1590.703048] env[62619]: _type = "VirtualMachine" [ 1590.703048] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1590.703343] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9d9f9653-6a4a-4522-a6b6-f813221fba7f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.705477] env[62619]: DEBUG nova.compute.manager [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1590.705572] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1590.706322] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3e8bac-7761-499d-bbab-a2963568079a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.714223] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1590.714446] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09c91f09-c75b-4eac-ab2a-dc7f1fea45a6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.716855] env[62619]: DEBUG oslo_vmware.rw_handles [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lease: (returnval){ [ 1590.716855] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52728328-e299-ca81-8300-956bdff16bbc" [ 1590.716855] env[62619]: _type = "HttpNfcLease" [ 1590.716855] env[62619]: } obtained for exporting VM: (result){ [ 1590.716855] env[62619]: value = "vm-369055" [ 1590.716855] env[62619]: _type = "VirtualMachine" [ 1590.716855] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1590.717121] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the lease: (returnval){ [ 1590.717121] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52728328-e299-ca81-8300-956bdff16bbc" [ 1590.717121] env[62619]: _type = "HttpNfcLease" [ 1590.717121] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1590.721285] env[62619]: DEBUG oslo_vmware.api [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1590.721285] env[62619]: value = "task-1777819" [ 1590.721285] env[62619]: _type = "Task" [ 1590.721285] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.727832] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1590.727832] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52728328-e299-ca81-8300-956bdff16bbc" [ 1590.727832] env[62619]: _type = "HttpNfcLease" [ 1590.727832] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1590.732666] env[62619]: DEBUG oslo_vmware.api [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777819, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.764416] env[62619]: DEBUG nova.network.neutron [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Updating instance_info_cache with network_info: [{"id": "2e09e8e9-5dd5-425b-9bed-ed1f14f42a12", "address": "fa:16:3e:05:26:9e", "network": {"id": "7bc2fa7e-84fd-4916-9af1-aa767e1e38b8", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1746046422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0948c27a2b08413ba82d553452965c9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e09e8e9-5d", "ovs_interfaceid": "2e09e8e9-5dd5-425b-9bed-ed1f14f42a12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1590.800478] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777817, 'name': ReconfigVM_Task, 'duration_secs': 0.689393} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.800864] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Reconfigured VM instance instance-0000003f to attach disk [datastore1] a6ba8114-0261-4894-98c0-9e0360f6d256/a6ba8114-0261-4894-98c0-9e0360f6d256.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1590.801145] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=62619) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1590.801862] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-22d2fa40-003d-4141-a0b6-ee45998e9137 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.809650] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1590.809650] env[62619]: value = "task-1777820" [ 1590.809650] env[62619]: _type = "Task" [ 1590.809650] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.818570] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777820, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.969795] env[62619]: DEBUG nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1591.001610] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1591.001855] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1591.002014] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1591.002226] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1591.002426] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1591.002629] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1591.002857] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1591.003089] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1591.003329] env[62619]: DEBUG 
nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1591.003545] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1591.003725] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1591.004672] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94afe13e-d7d9-43dd-a654-64f672d87626 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.013906] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb59aad-45c8-431e-bda8-16f4d58c8006 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.100107] env[62619]: DEBUG oslo_concurrency.lockutils [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.154s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.102453] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.539s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.102682] env[62619]: DEBUG nova.objects.instance [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Lazy-loading 'resources' on Instance uuid 79dfeb2b-06d0-45f1-b97e-10fa4f00d282 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1591.115956] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777814, 'name': CloneVM_Task} progress is 95%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.123662] env[62619]: DEBUG oslo_vmware.api [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777815, 'name': PowerOnVM_Task, 'duration_secs': 1.549549} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.124603] env[62619]: INFO nova.scheduler.client.report [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Deleted allocations for instance 7217d898-54ee-46ed-88fa-959c38e988e7 [ 1591.128250] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1591.128451] env[62619]: INFO nova.compute.manager [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Took 11.18 seconds to spawn the instance on the hypervisor. [ 1591.128731] env[62619]: DEBUG nova.compute.manager [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1591.129857] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0165368-6ba5-49b0-be99-0bb0df847811 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.228367] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1591.228367] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52728328-e299-ca81-8300-956bdff16bbc" [ 1591.228367] env[62619]: _type = "HttpNfcLease" [ 1591.228367] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1591.229200] env[62619]: DEBUG oslo_vmware.rw_handles [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1591.229200] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52728328-e299-ca81-8300-956bdff16bbc" [ 1591.229200] env[62619]: _type = "HttpNfcLease" [ 1591.229200] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1591.231569] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6941f64-566f-4de7-a648-96bc5463dee4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.238101] env[62619]: DEBUG oslo_vmware.api [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777819, 'name': PowerOffVM_Task, 'duration_secs': 0.209579} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.238772] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1591.238966] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1591.239257] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5d46d81-16dd-43f4-8399-e6c32db16587 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.243894] env[62619]: DEBUG oslo_vmware.rw_handles [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ade7ea-e68d-8fd9-0f26-4a145928c99b/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1591.244173] env[62619]: DEBUG oslo_vmware.rw_handles [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ade7ea-e68d-8fd9-0f26-4a145928c99b/disk-0.vmdk for reading. 
{{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1591.306181] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lock "refresh_cache-ec56c824-5f9a-47bf-bcd6-e456ddaad2f2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1591.306524] env[62619]: DEBUG nova.compute.manager [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Instance network_info: |[{"id": "2e09e8e9-5dd5-425b-9bed-ed1f14f42a12", "address": "fa:16:3e:05:26:9e", "network": {"id": "7bc2fa7e-84fd-4916-9af1-aa767e1e38b8", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1746046422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0948c27a2b08413ba82d553452965c9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e09e8e9-5d", "ovs_interfaceid": "2e09e8e9-5dd5-425b-9bed-ed1f14f42a12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1591.306947] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:26:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a27fd90b-16a5-43af-bede-ae36762ece00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e09e8e9-5dd5-425b-9bed-ed1f14f42a12', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1591.314766] env[62619]: DEBUG oslo.service.loopingcall [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1591.316929] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1591.320482] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2b576a1-bd8c-47b6-b573-7abd1aad8219 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.342757] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777820, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.051402} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.344145] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=62619) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1591.344414] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1591.344414] env[62619]: value = "task-1777822" [ 1591.344414] env[62619]: _type = "Task" [ 1591.344414] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.345107] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfddb330-6933-4f6a-bc64-4322f1a98f60 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.355984] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777822, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.376069] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] a6ba8114-0261-4894-98c0-9e0360f6d256/ephemeral_0.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1591.377429] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55044bfc-cba3-499c-bf65-b2bb944bd700 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.392053] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1591.392290] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1591.392431] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Deleting the datastore file [datastore1] c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1591.395207] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f474460-fa58-4bd1-a40d-1f365255e206 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.397731] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fa26b798-65c7-4bac-b026-ef2318cdc98e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.402541] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1591.402541] env[62619]: value = "task-1777823" [ 1591.402541] env[62619]: _type = "Task" [ 1591.402541] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.409289] env[62619]: DEBUG oslo_vmware.api [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1591.409289] env[62619]: value = "task-1777824" [ 1591.409289] env[62619]: _type = "Task" [ 1591.409289] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.416712] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777823, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.424499] env[62619]: DEBUG oslo_vmware.api [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777824, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.470455] env[62619]: DEBUG nova.compute.manager [req-1e2a4b79-ae0e-478b-b82e-0588c07b0b1a req-2a5038f6-1f81-45ac-8540-20b14976d0dc service nova] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Received event network-changed-2e09e8e9-5dd5-425b-9bed-ed1f14f42a12 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1591.471963] env[62619]: DEBUG nova.compute.manager [req-1e2a4b79-ae0e-478b-b82e-0588c07b0b1a req-2a5038f6-1f81-45ac-8540-20b14976d0dc service nova] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Refreshing instance network info cache due to event network-changed-2e09e8e9-5dd5-425b-9bed-ed1f14f42a12. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1591.471963] env[62619]: DEBUG oslo_concurrency.lockutils [req-1e2a4b79-ae0e-478b-b82e-0588c07b0b1a req-2a5038f6-1f81-45ac-8540-20b14976d0dc service nova] Acquiring lock "refresh_cache-ec56c824-5f9a-47bf-bcd6-e456ddaad2f2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.471963] env[62619]: DEBUG oslo_concurrency.lockutils [req-1e2a4b79-ae0e-478b-b82e-0588c07b0b1a req-2a5038f6-1f81-45ac-8540-20b14976d0dc service nova] Acquired lock "refresh_cache-ec56c824-5f9a-47bf-bcd6-e456ddaad2f2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.471963] env[62619]: DEBUG nova.network.neutron [req-1e2a4b79-ae0e-478b-b82e-0588c07b0b1a req-2a5038f6-1f81-45ac-8540-20b14976d0dc service nova] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Refreshing network info cache for port 2e09e8e9-5dd5-425b-9bed-ed1f14f42a12 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1591.497604] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.497604] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.497604] env[62619]: 
DEBUG nova.compute.manager [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1591.497939] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a6cb4d-bbf7-4927-9bc6-d4e68f532d6c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.505541] env[62619]: DEBUG nova.compute.manager [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1591.616622] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777814, 'name': CloneVM_Task, 'duration_secs': 2.297513} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.617777] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Created linked-clone VM from snapshot [ 1591.618642] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed496b2-af3d-44f3-84be-850fe70402b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.627810] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Uploading image 4d6d5df4-da7c-4196-bb28-5af17f80671f {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1591.633515] env[62619]: DEBUG oslo_concurrency.lockutils [None req-78ee8a9f-5547-4253-adfb-5f5f01eec20e tempest-ServerShowV247Test-778751691 tempest-ServerShowV247Test-778751691-project-member] Lock "7217d898-54ee-46ed-88fa-959c38e988e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.953s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.657601] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1591.657601] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-70a1f796-7c90-4c46-91f4-2fbc4e336b86 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.661418] env[62619]: INFO nova.compute.manager [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Took 35.48 seconds to build instance. [ 1591.667278] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1591.667278] env[62619]: value = "task-1777825" [ 1591.667278] env[62619]: _type = "Task" [ 1591.667278] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.680255] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777825, 'name': Destroy_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.857878] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777822, 'name': CreateVM_Task, 'duration_secs': 0.405883} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.858028] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1591.858829] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/88432144-700d-4829-a1f6-4d35530dfc87" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.859228] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired lock "[datastore1] devstack-image-cache_base/88432144-700d-4829-a1f6-4d35530dfc87" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.859564] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/88432144-700d-4829-a1f6-4d35530dfc87" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1591.862978] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-645bfbf3-ab51-410b-ae41-58108a6c40da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.868551] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1591.868551] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5250a71a-7647-4e4c-cfc7-f7bbbd8dc8d7" [ 1591.868551] env[62619]: _type = "Task" [ 1591.868551] env[62619]: } to 
complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.886780] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lock "[datastore1] devstack-image-cache_base/88432144-700d-4829-a1f6-4d35530dfc87" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1591.887074] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Processing image 88432144-700d-4829-a1f6-4d35530dfc87 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1591.887522] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/88432144-700d-4829-a1f6-4d35530dfc87/88432144-700d-4829-a1f6-4d35530dfc87.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.887615] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired lock "[datastore1] devstack-image-cache_base/88432144-700d-4829-a1f6-4d35530dfc87/88432144-700d-4829-a1f6-4d35530dfc87.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.887865] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1591.888495] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0162496a-1896-493f-816d-f7274195dfe0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.913935] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1591.914333] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1591.918961] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e441341f-04f1-4881-9100-745e4645b162 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.925528] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777823, 'name': ReconfigVM_Task, 'duration_secs': 0.493622} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.929643] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Reconfigured VM instance instance-0000003f to attach disk [datastore1] a6ba8114-0261-4894-98c0-9e0360f6d256/ephemeral_0.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1591.930934] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-107c0ab5-6361-4bf2-8516-1fcad5e1d7bb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.938745] env[62619]: DEBUG oslo_vmware.api [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777824, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235384} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.939120] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1591.939120] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522e604d-589e-2d55-f1f6-1608ac628eae" [ 1591.939120] env[62619]: _type = "Task" [ 1591.939120] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.940090] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1591.940394] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1591.940789] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1591.941222] env[62619]: INFO nova.compute.manager [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1591.941715] env[62619]: DEBUG oslo.service.loopingcall [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1591.942826] env[62619]: DEBUG nova.compute.manager [-] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1591.943133] env[62619]: DEBUG nova.network.neutron [-] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1591.952890] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1591.952890] env[62619]: value = "task-1777826" [ 1591.952890] env[62619]: _type = "Task" [ 1591.952890] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.962996] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Preparing fetch location {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1591.963418] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Fetch image to [datastore1] OSTACK_IMG_da81f4d0-6208-410e-8cd0-6e1b6fc7e321/OSTACK_IMG_da81f4d0-6208-410e-8cd0-6e1b6fc7e321.vmdk {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1591.963737] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Downloading stream optimized image 88432144-700d-4829-a1f6-4d35530dfc87 to [datastore1] OSTACK_IMG_da81f4d0-6208-410e-8cd0-6e1b6fc7e321/OSTACK_IMG_da81f4d0-6208-410e-8cd0-6e1b6fc7e321.vmdk on the data store datastore1 as vApp {{(pid=62619) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1591.963947] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Downloading image file data 88432144-700d-4829-a1f6-4d35530dfc87 to the ESX as VM named 'OSTACK_IMG_da81f4d0-6208-410e-8cd0-6e1b6fc7e321' {{(pid=62619) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1591.970938] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777826, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.016698] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1592.040548] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2a2d467-148d-4c78-a3c1-e1effe021c86 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.053990] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1592.053990] env[62619]: value = "task-1777827" [ 1592.053990] env[62619]: _type = "Task" [ 1592.053990] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.064776] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777827, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.105449] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1592.105449] env[62619]: value = "resgroup-9" [ 1592.105449] env[62619]: _type = "ResourcePool" [ 1592.105449] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1592.105737] env[62619]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-0a35ca69-d119-4600-9aff-9099a512d207 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.141630] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lease: (returnval){ [ 1592.141630] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5215ca46-9330-d8e7-ea49-f0a67440eeb1" [ 1592.141630] env[62619]: _type = "HttpNfcLease" [ 1592.141630] env[62619]: } obtained for vApp import into resource pool (val){ [ 1592.141630] env[62619]: value = "resgroup-9" [ 1592.141630] env[62619]: _type = "ResourcePool" [ 1592.141630] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1592.148138] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the lease: (returnval){ [ 1592.148138] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5215ca46-9330-d8e7-ea49-f0a67440eeb1" [ 1592.148138] env[62619]: _type = "HttpNfcLease" [ 1592.148138] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1592.163990] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ac4642d-1c89-426b-a98a-ab8b29181b48 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Lock "4b2e9965-cbd4-4d98-b003-436b4a8c913e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.993s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.166305] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1592.166305] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5215ca46-9330-d8e7-ea49-f0a67440eeb1" [ 1592.166305] env[62619]: _type = "HttpNfcLease" [ 1592.166305] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1592.181327] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777825, 'name': Destroy_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.263823] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74241b5f-b46e-4039-ad1a-ca800949fcf1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.272262] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d9e154-c532-4687-9598-ee0effcec3f8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.026556] env[62619]: DEBUG nova.network.neutron [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Successfully updated port: ac8f6def-d1ab-4e64-a359-5a90c2d13c1c {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1593.047642] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00edd0a2-d5b4-4545-a175-7ff8a8529df3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.059514] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777826, 'name': Rename_Task, 'duration_secs': 0.234826} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.069062] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1593.069415] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1593.069415] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5215ca46-9330-d8e7-ea49-f0a67440eeb1" [ 1593.069415] env[62619]: _type = "HttpNfcLease" [ 1593.069415] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1593.070574] env[62619]: DEBUG oslo_vmware.api [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777827, 'name': PowerOffVM_Task, 'duration_secs': 0.205895} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.070978] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777825, 'name': Destroy_Task, 'duration_secs': 0.641843} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.071214] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-472162f6-c455-43ac-8d78-bb9a4946d16e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.073552] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1593.073552] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5215ca46-9330-d8e7-ea49-f0a67440eeb1" [ 1593.073552] env[62619]: _type = "HttpNfcLease" [ 1593.073552] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1593.074891] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4bf116-5843-4bd5-83f7-4a2035ed4bc2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.078707] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1593.079214] env[62619]: DEBUG nova.compute.manager [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1593.079280] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Destroyed the VM [ 1593.079478] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1593.080798] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3f1897-19c2-41c4-a0c1-32bab4f8fa21 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.084470] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa1b667-2a9f-4fed-8c63-e84abfb8d10b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1593.086813] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6a5f2c3e-7777-4ac9-b8c0-36ae59dd7fdb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.099968] env[62619]: DEBUG nova.compute.provider_tree [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1593.109419] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1593.109419] env[62619]: value = "task-1777830" [ 1593.109419] env[62619]: _type = "Task" [ 1593.109419] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.109736] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1593.109736] env[62619]: value = "task-1777829" [ 1593.109736] env[62619]: _type = "Task" [ 1593.109736] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.109954] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522a29fe-bcc8-3b05-04cf-3e1f1f20e519/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1593.110190] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522a29fe-bcc8-3b05-04cf-3e1f1f20e519/disk-0.vmdk. {{(pid=62619) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1593.116546] env[62619]: DEBUG nova.network.neutron [req-1e2a4b79-ae0e-478b-b82e-0588c07b0b1a req-2a5038f6-1f81-45ac-8540-20b14976d0dc service nova] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Updated VIF entry in instance network info cache for port 2e09e8e9-5dd5-425b-9bed-ed1f14f42a12. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1593.116921] env[62619]: DEBUG nova.network.neutron [req-1e2a4b79-ae0e-478b-b82e-0588c07b0b1a req-2a5038f6-1f81-45ac-8540-20b14976d0dc service nova] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Updating instance_info_cache with network_info: [{"id": "2e09e8e9-5dd5-425b-9bed-ed1f14f42a12", "address": "fa:16:3e:05:26:9e", "network": {"id": "7bc2fa7e-84fd-4916-9af1-aa767e1e38b8", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1746046422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0948c27a2b08413ba82d553452965c9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e09e8e9-5d", "ovs_interfaceid": "2e09e8e9-5dd5-425b-9bed-ed1f14f42a12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1593.195440] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0463493f-b258-4856-a456-092a9cccb7a6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.197422] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777830, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.198125] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777829, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.553039] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fe9e3959-0bce-4f96-a6de-70ef1acd8b68 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Acquiring lock "interface-4b2e9965-cbd4-4d98-b003-436b4a8c913e-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.553446] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fe9e3959-0bce-4f96-a6de-70ef1acd8b68 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Lock "interface-4b2e9965-cbd4-4d98-b003-436b4a8c913e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.553913] env[62619]: DEBUG nova.objects.instance [None req-fe9e3959-0bce-4f96-a6de-70ef1acd8b68 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Lazy-loading 'flavor' on Instance uuid 4b2e9965-cbd4-4d98-b003-436b4a8c913e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1593.557064] env[62619]: DEBUG nova.network.neutron [-] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1593.560680] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "refresh_cache-a5e4c524-7cc8-4981-899e-1a7c80fac2bd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.560680] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired lock "refresh_cache-a5e4c524-7cc8-4981-899e-1a7c80fac2bd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.560680] env[62619]: DEBUG nova.network.neutron [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1593.603374] env[62619]: DEBUG nova.scheduler.client.report [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1593.619334] 
env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.123s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.620384] env[62619]: DEBUG oslo_concurrency.lockutils [req-1e2a4b79-ae0e-478b-b82e-0588c07b0b1a req-2a5038f6-1f81-45ac-8540-20b14976d0dc service nova] Releasing lock "refresh_cache-ec56c824-5f9a-47bf-bcd6-e456ddaad2f2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.641709] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777830, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.649492] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777829, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.062264] env[62619]: DEBUG nova.objects.instance [None req-fe9e3959-0bce-4f96-a6de-70ef1acd8b68 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Lazy-loading 'pci_requests' on Instance uuid 4b2e9965-cbd4-4d98-b003-436b4a8c913e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1594.063907] env[62619]: INFO nova.compute.manager [-] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Took 2.12 seconds to deallocate network for instance. [ 1594.107883] env[62619]: DEBUG nova.network.neutron [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1594.112260] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.010s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.117930] env[62619]: DEBUG oslo_concurrency.lockutils [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.535s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.118288] env[62619]: DEBUG nova.objects.instance [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lazy-loading 'resources' on Instance uuid 0272ca2a-e9ff-4af5-8120-278a82d74627 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1594.139146] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.145437] env[62619]: INFO nova.scheduler.client.report [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Deleted allocations for instance 79dfeb2b-06d0-45f1-b97e-10fa4f00d282 [ 1594.155776] env[62619]: DEBUG oslo_vmware.api [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777830, 'name': RemoveSnapshot_Task, 'duration_secs': 0.98835} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.156363] env[62619]: DEBUG oslo_vmware.api [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1777829, 'name': PowerOnVM_Task, 'duration_secs': 0.693921} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.158824] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1594.161471] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1594.161962] env[62619]: INFO nova.compute.manager [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Took 11.12 seconds to spawn the instance on the hypervisor. [ 1594.161962] env[62619]: DEBUG nova.compute.manager [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1594.163046] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d16fee5-3ee4-436c-9bba-93bb402b2f8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.246902] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Completed reading data from the image iterator. {{(pid=62619) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1594.247144] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522a29fe-bcc8-3b05-04cf-3e1f1f20e519/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1594.248039] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872c24ba-5def-4720-acd6-243b54294942 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.255455] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522a29fe-bcc8-3b05-04cf-3e1f1f20e519/disk-0.vmdk is in state: ready. 
{{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1594.255702] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522a29fe-bcc8-3b05-04cf-3e1f1f20e519/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1594.256069] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-bd529968-28e2-4b5a-b84b-4dc258a13d52 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.308353] env[62619]: DEBUG nova.network.neutron [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Updating instance_info_cache with network_info: [{"id": "ac8f6def-d1ab-4e64-a359-5a90c2d13c1c", "address": "fa:16:3e:d8:e2:80", "network": {"id": "0e501edc-55e3-4ded-9e7c-07e29223ba50", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1446821690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc6315c41fe451a94e28d68cab87b2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac8f6def-d1", "ovs_interfaceid": "ac8f6def-d1ab-4e64-a359-5a90c2d13c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1594.352782] env[62619]: DEBUG nova.compute.manager [req-5aafec5f-7e67-4105-8eea-a3a6346a51cd req-4d2bacb8-02ed-47dc-8bd4-0cee50b7b7e8 service nova] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Received event network-vif-plugged-ac8f6def-d1ab-4e64-a359-5a90c2d13c1c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1594.353077] env[62619]: DEBUG oslo_concurrency.lockutils [req-5aafec5f-7e67-4105-8eea-a3a6346a51cd req-4d2bacb8-02ed-47dc-8bd4-0cee50b7b7e8 service nova] Acquiring lock "a5e4c524-7cc8-4981-899e-1a7c80fac2bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.353362] env[62619]: DEBUG oslo_concurrency.lockutils [req-5aafec5f-7e67-4105-8eea-a3a6346a51cd req-4d2bacb8-02ed-47dc-8bd4-0cee50b7b7e8 service nova] Lock "a5e4c524-7cc8-4981-899e-1a7c80fac2bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.353582] 
env[62619]: DEBUG oslo_concurrency.lockutils [req-5aafec5f-7e67-4105-8eea-a3a6346a51cd req-4d2bacb8-02ed-47dc-8bd4-0cee50b7b7e8 service nova] Lock "a5e4c524-7cc8-4981-899e-1a7c80fac2bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.353830] env[62619]: DEBUG nova.compute.manager [req-5aafec5f-7e67-4105-8eea-a3a6346a51cd req-4d2bacb8-02ed-47dc-8bd4-0cee50b7b7e8 service nova] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] No waiting events found dispatching network-vif-plugged-ac8f6def-d1ab-4e64-a359-5a90c2d13c1c {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1594.354064] env[62619]: WARNING nova.compute.manager [req-5aafec5f-7e67-4105-8eea-a3a6346a51cd req-4d2bacb8-02ed-47dc-8bd4-0cee50b7b7e8 service nova] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Received unexpected event network-vif-plugged-ac8f6def-d1ab-4e64-a359-5a90c2d13c1c for instance with vm_state building and task_state spawning. [ 1594.354314] env[62619]: DEBUG nova.compute.manager [req-5aafec5f-7e67-4105-8eea-a3a6346a51cd req-4d2bacb8-02ed-47dc-8bd4-0cee50b7b7e8 service nova] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Received event network-vif-deleted-1535065e-6e20-4745-957d-4e77e36ba2d3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1594.354565] env[62619]: DEBUG nova.compute.manager [req-5aafec5f-7e67-4105-8eea-a3a6346a51cd req-4d2bacb8-02ed-47dc-8bd4-0cee50b7b7e8 service nova] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Received event network-changed-ac8f6def-d1ab-4e64-a359-5a90c2d13c1c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1594.354759] env[62619]: DEBUG nova.compute.manager [req-5aafec5f-7e67-4105-8eea-a3a6346a51cd req-4d2bacb8-02ed-47dc-8bd4-0cee50b7b7e8 service nova] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Refreshing instance network info cache due to event network-changed-ac8f6def-d1ab-4e64-a359-5a90c2d13c1c. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1594.354995] env[62619]: DEBUG oslo_concurrency.lockutils [req-5aafec5f-7e67-4105-8eea-a3a6346a51cd req-4d2bacb8-02ed-47dc-8bd4-0cee50b7b7e8 service nova] Acquiring lock "refresh_cache-a5e4c524-7cc8-4981-899e-1a7c80fac2bd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.494187] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522a29fe-bcc8-3b05-04cf-3e1f1f20e519/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1594.494489] env[62619]: INFO nova.virt.vmwareapi.images [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Downloaded image file data 88432144-700d-4829-a1f6-4d35530dfc87 [ 1594.495655] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a241ac-ee54-4c40-93cd-cb08acb32f42 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.511797] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5bbcd91-f363-4966-8e8b-c46726ecfc0b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.550712] env[62619]: INFO nova.virt.vmwareapi.images [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] The imported VM was unregistered [ 1594.553193] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Caching image {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1594.553451] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating directory with path [datastore1] devstack-image-cache_base/88432144-700d-4829-a1f6-4d35530dfc87 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1594.553778] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9f6c203-db12-4be9-a45d-ccd35e3589a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.566630] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Created directory with path [datastore1] devstack-image-cache_base/88432144-700d-4829-a1f6-4d35530dfc87 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1594.566844] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_da81f4d0-6208-410e-8cd0-6e1b6fc7e321/OSTACK_IMG_da81f4d0-6208-410e-8cd0-6e1b6fc7e321.vmdk to [datastore1] devstack-image-cache_base/88432144-700d-4829-a1f6-4d35530dfc87/88432144-700d-4829-a1f6-4d35530dfc87.vmdk. 
{{(pid=62619) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1594.567132] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-d54f0a09-d441-4455-97fb-5714c094ef43 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.569684] env[62619]: DEBUG nova.objects.base [None req-fe9e3959-0bce-4f96-a6de-70ef1acd8b68 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Object Instance<4b2e9965-cbd4-4d98-b003-436b4a8c913e> lazy-loaded attributes: flavor,pci_requests {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1594.569910] env[62619]: DEBUG nova.network.neutron [None req-fe9e3959-0bce-4f96-a6de-70ef1acd8b68 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1594.577811] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.578279] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1594.578279] env[62619]: value = "task-1777832" [ 1594.578279] env[62619]: _type = "Task" [ 1594.578279] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.587162] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777832, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.664555] env[62619]: WARNING nova.compute.manager [None req-beff96a7-b44b-4731-9856-f724e1d23e7d tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Image not found during snapshot: nova.exception.ImageNotFound: Image 4d6d5df4-da7c-4196-bb28-5af17f80671f could not be found. 
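The wait_for_task / _poll_task entries throughout this stretch all follow one pattern: oslo_vmware submits a long-running vCenter task (CloneVM_Task, Destroy_Task, Rename_Task, MoveVirtualDisk_Task, ...) and then polls the task's state on an interval, logging "progress is N%" until the task reaches a terminal state. The sketch below is a minimal, self-contained illustration of that poll-until-terminal pattern only; it is not the oslo_vmware implementation, and the get_task_info callable and its attributes are hypothetical stand-ins for the real VIM TaskInfo property reads.

import time

class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll get_task_info() until the task finishes or times out.

    get_task_info is any callable returning an object with a `state`
    attribute ('queued', 'running', 'success', 'error') and optional
    `progress` / `error` attributes -- a stand-in for a VIM TaskInfo.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info()
        if info.state == "success":
            return info                      # terminal: hand the result back
        if info.state == "error":
            raise TaskFailed(getattr(info, "error", "task failed"))
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete in time")
        # still 'queued' or 'running': report progress and retry,
        # which is what shows up in the log as "progress is N%"
        print("task progress is %s%%" % getattr(info, "progress", 0))
        time.sleep(poll_interval)

In the log above, that loop surfaces as the repeated "Task: {'id': task-..., 'name': ...} progress is N%" lines, followed by a single "completed successfully" entry (with duration_secs) once the task reaches its terminal state.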
[ 1594.665876] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d4fc111-59af-44ca-b978-4ebd35daa800 tempest-ServerPasswordTestJSON-1855587611 tempest-ServerPasswordTestJSON-1855587611-project-member] Lock "79dfeb2b-06d0-45f1-b97e-10fa4f00d282" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.084s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.675499] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fe9e3959-0bce-4f96-a6de-70ef1acd8b68 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Lock "interface-4b2e9965-cbd4-4d98-b003-436b4a8c913e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.122s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.685314] env[62619]: INFO nova.compute.manager [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Took 37.08 seconds to build instance. [ 1594.810842] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Releasing lock "refresh_cache-a5e4c524-7cc8-4981-899e-1a7c80fac2bd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.811206] env[62619]: DEBUG nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Instance network_info: |[{"id": "ac8f6def-d1ab-4e64-a359-5a90c2d13c1c", "address": "fa:16:3e:d8:e2:80", "network": {"id": "0e501edc-55e3-4ded-9e7c-07e29223ba50", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1446821690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc6315c41fe451a94e28d68cab87b2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac8f6def-d1", "ovs_interfaceid": "ac8f6def-d1ab-4e64-a359-5a90c2d13c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1594.811509] env[62619]: DEBUG oslo_concurrency.lockutils [req-5aafec5f-7e67-4105-8eea-a3a6346a51cd req-4d2bacb8-02ed-47dc-8bd4-0cee50b7b7e8 service nova] Acquired lock "refresh_cache-a5e4c524-7cc8-4981-899e-1a7c80fac2bd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.811690] env[62619]: DEBUG nova.network.neutron 
[req-5aafec5f-7e67-4105-8eea-a3a6346a51cd req-4d2bacb8-02ed-47dc-8bd4-0cee50b7b7e8 service nova] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Refreshing network info cache for port ac8f6def-d1ab-4e64-a359-5a90c2d13c1c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1594.813139] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:e2:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aaf1b231-3660-4453-b4f3-44d825b9a5dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac8f6def-d1ab-4e64-a359-5a90c2d13c1c', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1594.820415] env[62619]: DEBUG oslo.service.loopingcall [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1594.823731] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1594.824194] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-68f6d457-cecb-4e4d-9124-b9a62bccf272 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.847040] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1594.847040] env[62619]: value = "task-1777833" [ 1594.847040] env[62619]: _type = "Task" [ 1594.847040] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.856329] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777833, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.065608] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885bc95d-5e98-4028-91fc-a36de54c5729 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.074886] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64522031-b9d8-4e8f-974c-05cd8c734326 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.114875] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0628970d-8a41-48c8-b741-3f80aa06880e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.117703] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777832, 'name': MoveVirtualDisk_Task} progress is 15%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.127216] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a8f89a-a04b-4096-bba3-6b21468a8b76 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.146715] env[62619]: DEBUG nova.compute.provider_tree [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1595.188056] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e80e868e-7209-48e3-b663-c69499db9906 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "a6ba8114-0261-4894-98c0-9e0360f6d256" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.610s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1595.366353] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777833, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.569561] env[62619]: DEBUG nova.network.neutron [req-5aafec5f-7e67-4105-8eea-a3a6346a51cd req-4d2bacb8-02ed-47dc-8bd4-0cee50b7b7e8 service nova] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Updated VIF entry in instance network info cache for port ac8f6def-d1ab-4e64-a359-5a90c2d13c1c. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1595.569996] env[62619]: DEBUG nova.network.neutron [req-5aafec5f-7e67-4105-8eea-a3a6346a51cd req-4d2bacb8-02ed-47dc-8bd4-0cee50b7b7e8 service nova] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Updating instance_info_cache with network_info: [{"id": "ac8f6def-d1ab-4e64-a359-5a90c2d13c1c", "address": "fa:16:3e:d8:e2:80", "network": {"id": "0e501edc-55e3-4ded-9e7c-07e29223ba50", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1446821690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc6315c41fe451a94e28d68cab87b2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac8f6def-d1", "ovs_interfaceid": "ac8f6def-d1ab-4e64-a359-5a90c2d13c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.595289] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777832, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.674774] env[62619]: ERROR nova.scheduler.client.report [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [req-42bde8c8-93cd-4020-99da-a60f9aed4126] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-42bde8c8-93cd-4020-99da-a60f9aed4126"}]} [ 1595.705525] env[62619]: DEBUG nova.scheduler.client.report [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1595.727826] env[62619]: DEBUG nova.scheduler.client.report [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1595.728133] env[62619]: DEBUG nova.compute.provider_tree [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1595.747581] env[62619]: DEBUG nova.scheduler.client.report [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1595.773395] env[62619]: DEBUG nova.scheduler.client.report [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1595.862404] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777833, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.966815] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "6cd2f6e6-79a4-41be-a349-b504028ecab4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.967200] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "6cd2f6e6-79a4-41be-a349-b504028ecab4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.967470] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "6cd2f6e6-79a4-41be-a349-b504028ecab4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.967739] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "6cd2f6e6-79a4-41be-a349-b504028ecab4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.967977] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "6cd2f6e6-79a4-41be-a349-b504028ecab4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1595.970249] env[62619]: INFO nova.compute.manager [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Terminating instance [ 1596.075320] env[62619]: DEBUG oslo_concurrency.lockutils [req-5aafec5f-7e67-4105-8eea-a3a6346a51cd req-4d2bacb8-02ed-47dc-8bd4-0cee50b7b7e8 service nova] Releasing lock "refresh_cache-a5e4c524-7cc8-4981-899e-1a7c80fac2bd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.099296] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777832, 'name': MoveVirtualDisk_Task} progress is 54%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.260133] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1415fe25-293f-4f26-9001-ac2df726c67d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.272767] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d5ddcf-bab5-40b7-bb6d-fe7330c73111 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.312249] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.312647] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.312944] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.313210] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.313472] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.316035] env[62619]: INFO nova.compute.manager [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Terminating instance [ 1596.318244] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a77b0b6d-980d-4362-84a6-80286f19f503 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.331868] env[62619]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf2c459-f408-4895-a4a7-8169bc864780 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.351911] env[62619]: DEBUG nova.compute.provider_tree [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1596.367381] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777833, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.476980] env[62619]: DEBUG nova.compute.manager [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1596.477307] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1596.478580] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ddd0e55-c574-439e-9f3b-0fabb00e6f0d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.492678] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1596.493176] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38463939-4fe1-40bf-8d76-3971b0386273 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.505484] env[62619]: DEBUG oslo_vmware.api [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1596.505484] env[62619]: value = "task-1777834" [ 1596.505484] env[62619]: _type = "Task" [ 1596.505484] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.522134] env[62619]: DEBUG oslo_vmware.api [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777834, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.599628] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777832, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.825030] env[62619]: DEBUG nova.compute.manager [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1596.825030] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1596.825601] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb8c060-1a6b-45a0-adb2-b5f9ad250c03 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.841144] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1596.841428] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82bd8a3d-a592-40db-b4c0-6b27bd6b4574 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.867302] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777833, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.898985] env[62619]: DEBUG nova.scheduler.client.report [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 90 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1596.899273] env[62619]: DEBUG nova.compute.provider_tree [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 90 to 91 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1596.899465] env[62619]: DEBUG nova.compute.provider_tree [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1597.006274] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1597.006274] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1597.006274] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleting the datastore file [datastore1] 5b1008fb-7c0a-4e12-90f8-119a82ea62f1 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1597.006274] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf74b418-b2eb-4eb9-a3a7-bab791687387 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.021659] env[62619]: DEBUG oslo_vmware.api [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 
tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777834, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.025850] env[62619]: DEBUG oslo_vmware.api [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1597.025850] env[62619]: value = "task-1777836" [ 1597.025850] env[62619]: _type = "Task" [ 1597.025850] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.037230] env[62619]: DEBUG oslo_vmware.api [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777836, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.100071] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777832, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.141881] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Acquiring lock "4b2e9965-cbd4-4d98-b003-436b4a8c913e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.142262] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Lock "4b2e9965-cbd4-4d98-b003-436b4a8c913e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.142489] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Acquiring lock "4b2e9965-cbd4-4d98-b003-436b4a8c913e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.142678] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Lock "4b2e9965-cbd4-4d98-b003-436b4a8c913e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.142845] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Lock 
"4b2e9965-cbd4-4d98-b003-436b4a8c913e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.145106] env[62619]: INFO nova.compute.manager [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Terminating instance [ 1597.363042] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777833, 'name': CreateVM_Task, 'duration_secs': 2.387998} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.363488] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1597.364394] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1597.364716] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1597.367017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1597.367017] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-928b0d90-de0c-4ea2-a420-fd5f27d43657 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.371216] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1597.371216] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c0eceb-6c91-a4b0-159f-93393d4019b0" [ 1597.371216] env[62619]: _type = "Task" [ 1597.371216] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.380626] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c0eceb-6c91-a4b0-159f-93393d4019b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.408325] env[62619]: DEBUG oslo_concurrency.lockutils [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.288s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.411505] env[62619]: DEBUG oslo_concurrency.lockutils [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.680s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.412356] env[62619]: DEBUG nova.objects.instance [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lazy-loading 'resources' on Instance uuid 0a80942c-eb86-480b-ab7b-33112dd90d28 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1597.449290] env[62619]: INFO nova.scheduler.client.report [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Deleted allocations for instance 0272ca2a-e9ff-4af5-8120-278a82d74627 [ 1597.522222] env[62619]: DEBUG oslo_vmware.api [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777834, 'name': PowerOffVM_Task, 'duration_secs': 0.863292} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.524955] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1597.524955] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1597.528308] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70e19e6c-bc8b-475e-8576-996f87064111 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.536062] env[62619]: DEBUG oslo_vmware.api [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777836, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.488888} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.536062] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1597.536062] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1597.536397] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1597.536397] env[62619]: INFO nova.compute.manager [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Took 0.71 seconds to destroy the instance on the hypervisor. [ 1597.536621] env[62619]: DEBUG oslo.service.loopingcall [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1597.536810] env[62619]: DEBUG nova.compute.manager [-] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1597.536903] env[62619]: DEBUG nova.network.neutron [-] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1597.595277] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777832, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.83123} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.597550] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_da81f4d0-6208-410e-8cd0-6e1b6fc7e321/OSTACK_IMG_da81f4d0-6208-410e-8cd0-6e1b6fc7e321.vmdk to [datastore1] devstack-image-cache_base/88432144-700d-4829-a1f6-4d35530dfc87/88432144-700d-4829-a1f6-4d35530dfc87.vmdk. 
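[editor's note] The "Moved virtual disk ... to [datastore1] devstack-image-cache_base/..." entry directly above is the point where the downloaded image becomes a cache entry: the temporary OSTACK_IMG_* disk is relocated under devstack-image-cache_base/<image id>/, and instances built from that image later get their root disk via CopyVirtualDisk out of the cache instead of a fresh Glance download. A small sketch of the datastore path construction involved, reproducing the paths printed in the log (pure string handling, not Nova's ds_util code):

    def cache_vmdk_path(datastore: str, image_id: str) -> str:
        # e.g. "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk"
        return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

    def instance_vmdk_path(datastore: str, instance_uuid: str) -> str:
        # e.g. "[datastore1] <instance>/<instance>.vmdk"
        return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    src = cache_vmdk_path("datastore1", "88432144-700d-4829-a1f6-4d35530dfc87")
    dst = instance_vmdk_path("datastore1", "ec56c824-5f9a-47bf-bcd6-e456ddaad2f2")
    print(f"Copying Virtual Disk {src} to {dst}")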
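[editor's note] The same block of entries also closes out the teardown of instance 5b1008fb-7c0a-4e12-90f8-119a82ea62f1: the VM is powered off, unregistered, its datastore directory deleted, and only then are the Neutron ports deallocated. The sketch below just makes that ordering explicit; the step functions are hypothetical stand-ins for the vmops/neutron calls, not Nova's real method names.

    def power_off_vm(uuid: str) -> None:         print(f"[{uuid}] PowerOffVM_Task")
    def unregister_vm(uuid: str) -> None:        print(f"[{uuid}] UnregisterVM")
    def delete_datastore_dir(uuid: str) -> None: print(f"[{uuid}] DeleteDatastoreFile_Task")
    def deallocate_network(uuid: str) -> None:   print(f"[{uuid}] deallocate_for_instance()")

    def destroy_instance(uuid: str) -> None:
        # Hypervisor-side cleanup first, network cleanup last, matching the log.
        power_off_vm(uuid)
        unregister_vm(uuid)
        delete_datastore_dir(uuid)
        deallocate_network(uuid)

    destroy_instance("5b1008fb-7c0a-4e12-90f8-119a82ea62f1")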
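[editor's note] Earlier in this stretch the scheduler report client hit a 409 "placement.concurrent_update" when PUTting inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b, refreshed its view, and succeeded on the retry with the generation moving from 90 to 91. That is Placement's optimistic-concurrency scheme: every inventory update must carry the provider generation the caller last saw. A rough sketch of the refresh-and-retry loop against the Placement REST API, using requests with a placeholder endpoint and token rather than Nova's real report client:

    import requests

    PLACEMENT = "http://placement.example.test"    # placeholder endpoint
    HEADERS = {"X-Auth-Token": "<token>",          # placeholder credentials
               "OpenStack-API-Version": "placement 1.26"}

    def set_inventory(rp_uuid: str, inventories: dict, retries: int = 3) -> None:
        url = f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories"
        for _ in range(retries):
            # Refresh the generation we are about to assert against.
            current = requests.get(url, headers=HEADERS).json()
            body = {"resource_provider_generation": current["resource_provider_generation"],
                    "inventories": inventories}
            resp = requests.put(url, json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return                      # success
            # placement.concurrent_update: someone bumped the generation; retry.
        raise RuntimeError("gave up after repeated generation conflicts")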
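[editor's note] Finally, the instance_info_cache payloads logged above for a5e4c524-7cc8-4981-899e-1a7c80fac2bd are a list of VIF dictionaries. The snippet below walks a trimmed copy of that structure to pull out the fixed IPs and MTU, purely to illustrate its shape; the real cache entry carries more keys than are reproduced here.

    # Trimmed copy of one VIF entry from the instance_info_cache shown above.
    vif = {
        "id": "ac8f6def-d1ab-4e64-a359-5a90c2d13c1c",
        "address": "fa:16:3e:d8:e2:80",
        "network": {
            "id": "0e501edc-55e3-4ded-9e7c-07e29223ba50",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1"},
                "ips": [{"address": "192.168.128.9", "type": "fixed"}],
            }],
            "meta": {"mtu": 8950},
        },
    }

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], fixed_ips, vif["network"]["meta"]["mtu"])
    # -> ac8f6def-d1ab-4e64-a359-5a90c2d13c1c fa:16:3e:d8:e2:80 ['192.168.128.9'] 8950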
[ 1597.597753] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Cleaning up location [datastore1] OSTACK_IMG_da81f4d0-6208-410e-8cd0-6e1b6fc7e321 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1597.597915] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_da81f4d0-6208-410e-8cd0-6e1b6fc7e321 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1597.598189] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d203a4b-2417-44a9-a55a-87a4910d84bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.607020] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1597.607020] env[62619]: value = "task-1777838" [ 1597.607020] env[62619]: _type = "Task" [ 1597.607020] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.612470] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777838, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.615584] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1597.615781] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1597.615955] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Deleting the datastore file [datastore1] 6cd2f6e6-79a4-41be-a349-b504028ecab4 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1597.616217] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c67d1f5-32a3-4d20-83b9-4c39af5a57f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.623016] env[62619]: DEBUG oslo_vmware.api [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1597.623016] env[62619]: value = 
"task-1777839" [ 1597.623016] env[62619]: _type = "Task" [ 1597.623016] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.630477] env[62619]: DEBUG oslo_vmware.api [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777839, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.651079] env[62619]: DEBUG nova.compute.manager [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1597.651079] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1597.651079] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a8561f-1cb6-4e26-9a21-9635fe363fd5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.659935] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1597.659935] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43243bc2-4685-4c40-9a01-128848cf1848 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.664815] env[62619]: DEBUG oslo_vmware.api [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Waiting for the task: (returnval){ [ 1597.664815] env[62619]: value = "task-1777840" [ 1597.664815] env[62619]: _type = "Task" [ 1597.664815] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.676210] env[62619]: DEBUG oslo_vmware.api [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777840, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.725120] env[62619]: DEBUG nova.compute.manager [req-d97d39e6-2afa-4dad-a430-3477117b9d2f req-cf009fdb-2420-4b3a-ae50-f0f4a8207691 service nova] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Received event network-changed-fac1bf3e-4c83-47cb-9a34-2a16035eb800 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1597.725120] env[62619]: DEBUG nova.compute.manager [req-d97d39e6-2afa-4dad-a430-3477117b9d2f req-cf009fdb-2420-4b3a-ae50-f0f4a8207691 service nova] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Refreshing instance network info cache due to event network-changed-fac1bf3e-4c83-47cb-9a34-2a16035eb800. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1597.725120] env[62619]: DEBUG oslo_concurrency.lockutils [req-d97d39e6-2afa-4dad-a430-3477117b9d2f req-cf009fdb-2420-4b3a-ae50-f0f4a8207691 service nova] Acquiring lock "refresh_cache-a6ba8114-0261-4894-98c0-9e0360f6d256" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1597.725120] env[62619]: DEBUG oslo_concurrency.lockutils [req-d97d39e6-2afa-4dad-a430-3477117b9d2f req-cf009fdb-2420-4b3a-ae50-f0f4a8207691 service nova] Acquired lock "refresh_cache-a6ba8114-0261-4894-98c0-9e0360f6d256" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1597.725351] env[62619]: DEBUG nova.network.neutron [req-d97d39e6-2afa-4dad-a430-3477117b9d2f req-cf009fdb-2420-4b3a-ae50-f0f4a8207691 service nova] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Refreshing network info cache for port fac1bf3e-4c83-47cb-9a34-2a16035eb800 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1597.883946] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c0eceb-6c91-a4b0-159f-93393d4019b0, 'name': SearchDatastore_Task, 'duration_secs': 0.020773} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.884332] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1597.884642] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1597.884802] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1597.884939] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1597.885146] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1597.885709] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef64490d-45eb-46e3-806d-d2a529336384 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.896025] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1597.896025] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1597.896025] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd7ddc73-e3ba-4bbe-98a0-0de5e616efe2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.902650] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1597.902650] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f16068-228d-bfff-8ff3-0e3c7e9a5a0a" [ 1597.902650] env[62619]: _type = "Task" [ 1597.902650] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.912786] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f16068-228d-bfff-8ff3-0e3c7e9a5a0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.921704] env[62619]: DEBUG nova.compute.manager [req-c715634c-261f-4b5c-9be7-8bfdc01b6db9 req-ba782502-04d1-4ec4-9d1d-94a38ed78f5d service nova] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Received event network-vif-deleted-61e78327-dbb0-497b-be4b-14a0a1d8a5de {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1597.922094] env[62619]: INFO nova.compute.manager [req-c715634c-261f-4b5c-9be7-8bfdc01b6db9 req-ba782502-04d1-4ec4-9d1d-94a38ed78f5d service nova] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Neutron deleted interface 61e78327-dbb0-497b-be4b-14a0a1d8a5de; detaching it from the instance and deleting it from the info cache [ 1597.922148] env[62619]: DEBUG nova.network.neutron [req-c715634c-261f-4b5c-9be7-8bfdc01b6db9 req-ba782502-04d1-4ec4-9d1d-94a38ed78f5d service nova] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1597.960181] env[62619]: DEBUG oslo_concurrency.lockutils [None req-320d26ad-4d31-4ba4-bd5c-29d356d00449 tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "0272ca2a-e9ff-4af5-8120-278a82d74627" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.805s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.119277] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777838, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102615} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.120264] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1598.120264] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lock "[datastore1] devstack-image-cache_base/88432144-700d-4829-a1f6-4d35530dfc87/88432144-700d-4829-a1f6-4d35530dfc87.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1598.120264] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/88432144-700d-4829-a1f6-4d35530dfc87/88432144-700d-4829-a1f6-4d35530dfc87.vmdk to [datastore1] ec56c824-5f9a-47bf-bcd6-e456ddaad2f2/ec56c824-5f9a-47bf-bcd6-e456ddaad2f2.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1598.120515] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45a6b601-2131-41cc-b3cc-00215cc9662c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.127502] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1598.127502] env[62619]: value = "task-1777841" [ 1598.127502] env[62619]: _type = "Task" [ 1598.127502] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.133831] env[62619]: DEBUG oslo_vmware.api [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777839, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.341816} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.137879] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1598.137879] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1598.137879] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1598.137879] env[62619]: INFO nova.compute.manager [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1598.137879] env[62619]: DEBUG oslo.service.loopingcall [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1598.138123] env[62619]: DEBUG nova.compute.manager [-] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1598.138123] env[62619]: DEBUG nova.network.neutron [-] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1598.142442] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777841, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.178821] env[62619]: DEBUG oslo_vmware.api [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777840, 'name': PowerOffVM_Task, 'duration_secs': 0.315961} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.179608] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1598.179608] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1598.179726] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bbfb2118-779e-412a-a751-5d3783b276cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.306449] env[62619]: DEBUG nova.network.neutron [-] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1598.418863] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f16068-228d-bfff-8ff3-0e3c7e9a5a0a, 'name': SearchDatastore_Task, 'duration_secs': 0.013492} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.419889] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b4cd866-8ad7-4b0d-96aa-74cd7ffa2945 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.425810] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643262f2-22bd-4ac9-ba84-ac96af0a4d44 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.431886] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-649c82bc-7b9a-4838-bc13-a499cb6c1f6c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.433910] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1598.433910] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522f928c-d793-8ac4-d18f-fec045655d5f" [ 1598.433910] env[62619]: _type = "Task" [ 1598.433910] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.441307] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7dcc69-65ed-480a-b5f6-b31d08206e3f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.449515] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522f928c-d793-8ac4-d18f-fec045655d5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.453023] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5557ca6a-fa02-44de-b7fa-80f06cb473ed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.496679] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a16e6538-de5e-41dc-8e21-b05a59caf073 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.504912] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df904030-0089-4bc8-b446-8d4dbe040e70 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.525071] env[62619]: DEBUG nova.compute.manager [req-c715634c-261f-4b5c-9be7-8bfdc01b6db9 req-ba782502-04d1-4ec4-9d1d-94a38ed78f5d service nova] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Detach interface failed, port_id=61e78327-dbb0-497b-be4b-14a0a1d8a5de, reason: Instance 5b1008fb-7c0a-4e12-90f8-119a82ea62f1 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1598.535696] env[62619]: DEBUG nova.compute.provider_tree [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1598.584929] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1598.585159] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1598.585385] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Deleting the datastore file [datastore1] 4b2e9965-cbd4-4d98-b003-436b4a8c913e {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1598.585612] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02feddea-a4cd-4d64-b5df-bfa799e3803c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.595663] env[62619]: DEBUG oslo_vmware.api [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Waiting for the task: (returnval){ [ 1598.595663] env[62619]: value = "task-1777843" [ 1598.595663] env[62619]: _type = "Task" [ 1598.595663] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.606726] env[62619]: DEBUG oslo_vmware.api [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777843, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.640134] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777841, 'name': CopyVirtualDisk_Task} progress is 18%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.809296] env[62619]: INFO nova.compute.manager [-] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Took 1.27 seconds to deallocate network for instance. [ 1598.895693] env[62619]: DEBUG nova.network.neutron [req-d97d39e6-2afa-4dad-a430-3477117b9d2f req-cf009fdb-2420-4b3a-ae50-f0f4a8207691 service nova] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Updated VIF entry in instance network info cache for port fac1bf3e-4c83-47cb-9a34-2a16035eb800. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1598.896189] env[62619]: DEBUG nova.network.neutron [req-d97d39e6-2afa-4dad-a430-3477117b9d2f req-cf009fdb-2420-4b3a-ae50-f0f4a8207691 service nova] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Updating instance_info_cache with network_info: [{"id": "fac1bf3e-4c83-47cb-9a34-2a16035eb800", "address": "fa:16:3e:b5:f2:52", "network": {"id": "712c7a9c-8039-44f9-91d1-27991eef432a", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-366397882-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7dea13f34f140dd98291849f66720ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfac1bf3e-4c", "ovs_interfaceid": "fac1bf3e-4c83-47cb-9a34-2a16035eb800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1598.931489] env[62619]: DEBUG nova.network.neutron [-] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1598.948175] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522f928c-d793-8ac4-d18f-fec045655d5f, 'name': SearchDatastore_Task, 'duration_secs': 0.071101} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.949224] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1598.949550] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a5e4c524-7cc8-4981-899e-1a7c80fac2bd/a5e4c524-7cc8-4981-899e-1a7c80fac2bd.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1598.949863] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77ecd58a-4150-4a33-a7fd-df3fcb222b5c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.960972] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1598.960972] env[62619]: value = "task-1777844" [ 1598.960972] env[62619]: _type = "Task" [ 1598.960972] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.972835] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777844, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.071382] env[62619]: DEBUG nova.scheduler.client.report [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 91 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1599.071783] env[62619]: DEBUG nova.compute.provider_tree [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 91 to 92 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1599.071844] env[62619]: DEBUG nova.compute.provider_tree [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1599.107765] env[62619]: DEBUG oslo_vmware.api [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777843, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.140355] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777841, 'name': CopyVirtualDisk_Task} progress is 38%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.317071] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1599.399619] env[62619]: DEBUG oslo_concurrency.lockutils [req-d97d39e6-2afa-4dad-a430-3477117b9d2f req-cf009fdb-2420-4b3a-ae50-f0f4a8207691 service nova] Releasing lock "refresh_cache-a6ba8114-0261-4894-98c0-9e0360f6d256" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1599.434677] env[62619]: INFO nova.compute.manager [-] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Took 1.30 seconds to deallocate network for instance. [ 1599.473523] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777844, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.578226] env[62619]: DEBUG oslo_concurrency.lockutils [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.166s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1599.581067] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.373s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1599.583075] env[62619]: INFO nova.compute.claims [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1599.603120] env[62619]: INFO nova.scheduler.client.report [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleted allocations for instance 0a80942c-eb86-480b-ab7b-33112dd90d28 [ 1599.611935] env[62619]: DEBUG oslo_vmware.api [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777843, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.641788] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777841, 'name': CopyVirtualDisk_Task} progress is 57%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.943163] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1599.975330] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777844, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.111383] env[62619]: DEBUG oslo_vmware.api [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777843, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.117746] env[62619]: DEBUG oslo_concurrency.lockutils [None req-348dc754-9e4c-4721-aa27-4a3a1729ef2f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "0a80942c-eb86-480b-ab7b-33112dd90d28" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.493s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.142907] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777841, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.211188] env[62619]: DEBUG nova.compute.manager [req-2de61096-c499-4392-a3a1-161ed7fce926 req-aec8709a-3116-48e2-a3eb-a85824098f88 service nova] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Received event network-vif-deleted-2f30eb51-192e-4918-b660-2c50f9d59bb2 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1600.476702] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777844, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.612833] env[62619]: DEBUG oslo_vmware.api [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Task: {'id': task-1777843, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.948257} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.616083] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1600.616285] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1600.616558] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1600.616816] env[62619]: INFO nova.compute.manager [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Took 2.97 seconds to destroy the instance on the hypervisor. [ 1600.617100] env[62619]: DEBUG oslo.service.loopingcall [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1600.617567] env[62619]: DEBUG nova.compute.manager [-] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1600.617692] env[62619]: DEBUG nova.network.neutron [-] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1600.642382] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777841, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.980894] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777844, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.061679] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "fdde42eb-766c-4549-aae5-f7b1a1097cc6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.061900] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "fdde42eb-766c-4549-aae5-f7b1a1097cc6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.119092] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1af5da-8dee-425f-ac55-9df22cb5617e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.127320] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902d8189-21dd-4e00-961a-7e1833cdeb70 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.162951] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a062673e-705f-49a4-b3c2-f489395e4d14 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.168616] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777841, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.536611} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.169329] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/88432144-700d-4829-a1f6-4d35530dfc87/88432144-700d-4829-a1f6-4d35530dfc87.vmdk to [datastore1] ec56c824-5f9a-47bf-bcd6-e456ddaad2f2/ec56c824-5f9a-47bf-bcd6-e456ddaad2f2.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1601.170103] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2d4dc8-59ff-461a-82ee-962a815d0323 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.176103] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbbaa8ee-53c7-4541-9c3b-9f78620808c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.203864] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] ec56c824-5f9a-47bf-bcd6-e456ddaad2f2/ec56c824-5f9a-47bf-bcd6-e456ddaad2f2.vmdk or device None with type streamOptimized {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1601.205426] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f25a5f7-4807-449f-b36a-02cc0903580f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.231449] env[62619]: DEBUG nova.compute.provider_tree [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1601.241266] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1601.241266] env[62619]: value = "task-1777845" [ 1601.241266] env[62619]: _type = "Task" [ 1601.241266] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.250949] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777845, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.371705] env[62619]: DEBUG nova.compute.manager [req-19c1aca9-117a-4708-9aaf-96b92fe3cbbf req-5fc88a9c-a63f-4b24-b52d-d6d2e02064b2 service nova] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Received event network-vif-deleted-cab4b941-1153-4182-984e-3286233288af {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1601.371934] env[62619]: INFO nova.compute.manager [req-19c1aca9-117a-4708-9aaf-96b92fe3cbbf req-5fc88a9c-a63f-4b24-b52d-d6d2e02064b2 service nova] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Neutron deleted interface cab4b941-1153-4182-984e-3286233288af; detaching it from the instance and deleting it from the info cache [ 1601.372143] env[62619]: DEBUG nova.network.neutron [req-19c1aca9-117a-4708-9aaf-96b92fe3cbbf req-5fc88a9c-a63f-4b24-b52d-d6d2e02064b2 service nova] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1601.475678] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777844, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.036469} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.475925] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a5e4c524-7cc8-4981-899e-1a7c80fac2bd/a5e4c524-7cc8-4981-899e-1a7c80fac2bd.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1601.477033] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1601.477033] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8388ba1d-92d3-4404-b629-d1541c0fa0df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.483390] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1601.483390] env[62619]: value = "task-1777846" [ 1601.483390] env[62619]: _type = "Task" [ 1601.483390] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.492560] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777846, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.564677] env[62619]: DEBUG nova.compute.manager [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1601.734949] env[62619]: DEBUG nova.scheduler.client.report [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1601.750644] env[62619]: DEBUG nova.network.neutron [-] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1601.752267] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777845, 'name': ReconfigVM_Task, 'duration_secs': 0.449576} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.753048] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Reconfigured VM instance instance-00000040 to attach disk [datastore1] ec56c824-5f9a-47bf-bcd6-e456ddaad2f2/ec56c824-5f9a-47bf-bcd6-e456ddaad2f2.vmdk or device None with type streamOptimized {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1601.753684] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b56e9643-5b47-41f5-973b-d1f2c8ca84d7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.760485] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1601.760485] env[62619]: value = "task-1777847" [ 1601.760485] env[62619]: _type = "Task" [ 1601.760485] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.770362] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777847, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.874433] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3818e4f0-9908-4a2b-be14-43a72dcfc60c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.885763] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90808cd1-5782-4d16-bf23-102acbd7482f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.896638] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "9014ef05-64d1-4bd6-9f2e-db58003b6520" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.896894] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "9014ef05-64d1-4bd6-9f2e-db58003b6520" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.897181] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "9014ef05-64d1-4bd6-9f2e-db58003b6520-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.897380] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "9014ef05-64d1-4bd6-9f2e-db58003b6520-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.897452] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "9014ef05-64d1-4bd6-9f2e-db58003b6520-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.900179] env[62619]: INFO nova.compute.manager [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Terminating instance [ 1601.925357] env[62619]: DEBUG nova.compute.manager [req-19c1aca9-117a-4708-9aaf-96b92fe3cbbf req-5fc88a9c-a63f-4b24-b52d-d6d2e02064b2 service nova] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Detach interface failed, port_id=cab4b941-1153-4182-984e-3286233288af, reason: Instance 4b2e9965-cbd4-4d98-b003-436b4a8c913e could not 
be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1601.993207] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777846, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106626} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.993495] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1601.994302] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795015b9-1edb-4ac0-b782-577cfbac3925 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.017246] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] a5e4c524-7cc8-4981-899e-1a7c80fac2bd/a5e4c524-7cc8-4981-899e-1a7c80fac2bd.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1602.017584] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-082348d8-b200-40bf-bdc0-cb2261a5adda {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.040434] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1602.040434] env[62619]: value = "task-1777848" [ 1602.040434] env[62619]: _type = "Task" [ 1602.040434] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.050450] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777848, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.102346] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.241792] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.660s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.241792] env[62619]: DEBUG nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1602.244331] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.913s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.244583] env[62619]: DEBUG nova.objects.instance [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Lazy-loading 'resources' on Instance uuid fab3d689-9e30-4afd-b0cc-49c6d2870c50 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1602.253633] env[62619]: INFO nova.compute.manager [-] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Took 1.64 seconds to deallocate network for instance. [ 1602.271484] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777847, 'name': Rename_Task, 'duration_secs': 0.257263} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.272511] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1602.272511] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00651f14-1a25-4932-aeaf-b1bc6e912fcb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.279442] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1602.279442] env[62619]: value = "task-1777849" [ 1602.279442] env[62619]: _type = "Task" [ 1602.279442] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.287670] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777849, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.404680] env[62619]: DEBUG nova.compute.manager [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1602.404888] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1602.406227] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44f05be-a790-45e7-99cb-863f882a3ab9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.414043] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1602.414156] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9165af01-2a71-43c4-bfbc-864ed65c2fcf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.421637] env[62619]: DEBUG oslo_vmware.api [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1602.421637] env[62619]: value = "task-1777850" [ 1602.421637] env[62619]: _type = "Task" [ 1602.421637] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.430686] env[62619]: DEBUG oslo_vmware.api [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777850, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.551626] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777848, 'name': ReconfigVM_Task, 'duration_secs': 0.413908} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.552026] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Reconfigured VM instance instance-00000041 to attach disk [datastore1] a5e4c524-7cc8-4981-899e-1a7c80fac2bd/a5e4c524-7cc8-4981-899e-1a7c80fac2bd.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1602.553187] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2ad29f5-32e0-4a05-8751-d695ed683c37 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.558811] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1602.558811] env[62619]: value = "task-1777851" [ 1602.558811] env[62619]: _type = "Task" [ 1602.558811] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.567206] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777851, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.752061] env[62619]: DEBUG nova.compute.utils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1602.753991] env[62619]: DEBUG nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1602.753991] env[62619]: DEBUG nova.network.neutron [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1602.761737] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.790243] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777849, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.838172] env[62619]: DEBUG nova.policy [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1cf40e9c8cc34d578bb40763740eb5cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbc6315c41fe451a94e28d68cab87b2e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1602.935146] env[62619]: DEBUG oslo_vmware.api [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777850, 'name': PowerOffVM_Task, 'duration_secs': 0.310626} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.935446] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1602.935614] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1602.935872] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c27041f3-37a0-4baf-a01e-37744ad6ae7e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.043669] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1603.043886] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1603.044076] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Deleting the datastore file [datastore1] 9014ef05-64d1-4bd6-9f2e-db58003b6520 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1603.044400] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-edc258ed-a3e6-435b-bc78-bb80c5287dc4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.052257] env[62619]: DEBUG oslo_vmware.api [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for the task: (returnval){ [ 1603.052257] env[62619]: value = "task-1777853" [ 1603.052257] env[62619]: _type = "Task" [ 1603.052257] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.067839] env[62619]: DEBUG oslo_vmware.api [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777853, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.072702] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777851, 'name': Rename_Task, 'duration_secs': 0.235408} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.072992] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1603.073255] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51cf9f2c-18b3-457c-84aa-55b4e2de67bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.081306] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1603.081306] env[62619]: value = "task-1777854" [ 1603.081306] env[62619]: _type = "Task" [ 1603.081306] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.093525] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777854, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.216898] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ce53fa-470e-40e1-8757-66f9818e3950 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.224471] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c598d2-2cbe-4f42-83e5-5e74f00779e8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.230185] env[62619]: DEBUG nova.network.neutron [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Successfully created port: 960fe70c-58cb-4649-a282-0bbe4a7ae3d3 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1603.256563] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35241204-aa0a-4bed-bce3-1ebfc183f881 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.259576] env[62619]: DEBUG nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1603.267722] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fbdbbd-9287-4bc9-9395-7608acae609f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.286246] env[62619]: DEBUG nova.compute.provider_tree [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1603.296695] env[62619]: DEBUG oslo_vmware.api [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777849, 'name': PowerOnVM_Task, 'duration_secs': 0.741618} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.297595] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1603.297846] env[62619]: INFO nova.compute.manager [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Took 14.99 seconds to spawn the instance on the hypervisor. [ 1603.298100] env[62619]: DEBUG nova.compute.manager [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1603.298971] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6517d333-b3c2-4a3c-aba7-6d26f3f79cb5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.562419] env[62619]: DEBUG oslo_vmware.api [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Task: {'id': task-1777853, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.390051} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.562910] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1603.563318] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1603.563644] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1603.563953] env[62619]: INFO nova.compute.manager [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1603.564425] env[62619]: DEBUG oslo.service.loopingcall [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1603.564755] env[62619]: DEBUG nova.compute.manager [-] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1603.565035] env[62619]: DEBUG nova.network.neutron [-] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1603.599911] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777854, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.792714] env[62619]: DEBUG nova.scheduler.client.report [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1603.824461] env[62619]: INFO nova.compute.manager [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Took 42.05 seconds to build instance. [ 1603.898397] env[62619]: DEBUG nova.compute.manager [req-937e87e3-a656-41d1-bffc-a222a0b91c78 req-cc28ee81-b842-496a-9907-df50c1c6a045 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Received event network-vif-deleted-13b7e2bb-07ca-4faa-aa62-69635847b2f7 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1603.898550] env[62619]: INFO nova.compute.manager [req-937e87e3-a656-41d1-bffc-a222a0b91c78 req-cc28ee81-b842-496a-9907-df50c1c6a045 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Neutron deleted interface 13b7e2bb-07ca-4faa-aa62-69635847b2f7; detaching it from the instance and deleting it from the info cache [ 1603.898724] env[62619]: DEBUG nova.network.neutron [req-937e87e3-a656-41d1-bffc-a222a0b91c78 req-cc28ee81-b842-496a-9907-df50c1c6a045 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.093500] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777854, 'name': PowerOnVM_Task, 'duration_secs': 0.745915} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.093951] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "ec56c824-5f9a-47bf-bcd6-e456ddaad2f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.094255] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1604.094481] env[62619]: INFO nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Took 13.12 seconds to spawn the instance on the hypervisor. [ 1604.094748] env[62619]: DEBUG nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1604.095461] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67dada78-d8b3-4414-b3ce-2b5b302aa58f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.271581] env[62619]: DEBUG nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1604.299951] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1604.300215] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1604.300370] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1604.300551] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1604.300696] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1604.300839] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1604.301055] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1604.301216] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1604.301380] env[62619]: DEBUG 
nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1604.301539] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1604.301705] env[62619]: DEBUG nova.virt.hardware [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1604.302582] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.058s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.305118] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3096ab28-83cd-464a-9205-c4b65d90afca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.307977] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.960s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.308214] env[62619]: DEBUG nova.objects.instance [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lazy-loading 'resources' on Instance uuid 1f86b805-0fde-4bda-9a94-d440a670e23c {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1604.315233] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75916c2c-f831-4b10-8361-ed67a586854d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.330012] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3bd9201-29a6-4ea2-bdec-b6a223227470 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "ec56c824-5f9a-47bf-bcd6-e456ddaad2f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.312s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.330273] env[62619]: DEBUG nova.network.neutron [-] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.332197] env[62619]: INFO nova.scheduler.client.report [None 
req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Deleted allocations for instance fab3d689-9e30-4afd-b0cc-49c6d2870c50 [ 1604.333074] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "ec56c824-5f9a-47bf-bcd6-e456ddaad2f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.239s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.333290] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "ec56c824-5f9a-47bf-bcd6-e456ddaad2f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.333488] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "ec56c824-5f9a-47bf-bcd6-e456ddaad2f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.333651] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "ec56c824-5f9a-47bf-bcd6-e456ddaad2f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.337430] env[62619]: INFO nova.compute.manager [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Terminating instance [ 1604.401996] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b5d4618c-bbf2-4b90-a1c1-b30928bd3f65 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.411566] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702daece-945e-4376-8581-e374e4fd3232 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.445364] env[62619]: DEBUG nova.compute.manager [req-937e87e3-a656-41d1-bffc-a222a0b91c78 req-cc28ee81-b842-496a-9907-df50c1c6a045 service nova] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Detach interface failed, port_id=13b7e2bb-07ca-4faa-aa62-69635847b2f7, reason: Instance 9014ef05-64d1-4bd6-9f2e-db58003b6520 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1604.613489] env[62619]: INFO nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Took 41.80 seconds to build instance. 
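[annotation] The repeated 'Acquiring lock "..." by "..."' / 'Lock "..." acquired ... :: waited' / 'Lock "..." "released" ... :: held' triplets throughout this excerpt (logged from the "inner" wrapper at lockutils.py:402/407/421) are produced by oslo.concurrency's synchronized decorator, while the 'Acquiring/Acquired/Releasing lock "refresh_cache-..."' lines (lockutils.py:310/313/331) come from its lock() context manager. The following is a minimal, self-contained sketch of that pattern only; the lock names and the clear_events()/refresh_cache() functions are illustrative stand-ins, not Nova source code.

# Illustrative sketch only (stand-in names, not Nova's implementation).
# oslo_concurrency.lockutils.synchronized wraps the decorated function in an
# "inner" wrapper that logs acquisition (with wait time) and release (with
# hold time) at DEBUG level, matching the triplets seen in this log.
from oslo_concurrency import lockutils


@lockutils.synchronized('ec56c824-5f9a-47bf-bcd6-e456ddaad2f2-events')
def clear_events():
    # Runs only while the named in-process lock is held.
    return {}


def refresh_cache(instance_uuid):
    # The context-manager form seen for the "refresh_cache-..." locks;
    # lock() logs Acquiring/Acquired/Releasing while the block executes.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # network info cache would be rebuilt here under the lock


if __name__ == '__main__':
    clear_events()
    refresh_cache('0f925028-c376-438f-8a56-deaa23047199')

Both forms use plain in-process semaphores by default (external=False), so no oslo.config setup is needed to reproduce the DEBUG output when logging is enabled at that level.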
[ 1604.798664] env[62619]: DEBUG nova.compute.manager [req-6eecac9d-5e2d-43fc-bbf8-f61df86695e4 req-569762c8-9ea3-4d33-9967-85145ca0375b service nova] [instance: 0f925028-c376-438f-8a56-deaa23047199] Received event network-vif-plugged-960fe70c-58cb-4649-a282-0bbe4a7ae3d3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1604.798917] env[62619]: DEBUG oslo_concurrency.lockutils [req-6eecac9d-5e2d-43fc-bbf8-f61df86695e4 req-569762c8-9ea3-4d33-9967-85145ca0375b service nova] Acquiring lock "0f925028-c376-438f-8a56-deaa23047199-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.799214] env[62619]: DEBUG oslo_concurrency.lockutils [req-6eecac9d-5e2d-43fc-bbf8-f61df86695e4 req-569762c8-9ea3-4d33-9967-85145ca0375b service nova] Lock "0f925028-c376-438f-8a56-deaa23047199-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.799396] env[62619]: DEBUG oslo_concurrency.lockutils [req-6eecac9d-5e2d-43fc-bbf8-f61df86695e4 req-569762c8-9ea3-4d33-9967-85145ca0375b service nova] Lock "0f925028-c376-438f-8a56-deaa23047199-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.799575] env[62619]: DEBUG nova.compute.manager [req-6eecac9d-5e2d-43fc-bbf8-f61df86695e4 req-569762c8-9ea3-4d33-9967-85145ca0375b service nova] [instance: 0f925028-c376-438f-8a56-deaa23047199] No waiting events found dispatching network-vif-plugged-960fe70c-58cb-4649-a282-0bbe4a7ae3d3 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1604.799784] env[62619]: WARNING nova.compute.manager [req-6eecac9d-5e2d-43fc-bbf8-f61df86695e4 req-569762c8-9ea3-4d33-9967-85145ca0375b service nova] [instance: 0f925028-c376-438f-8a56-deaa23047199] Received unexpected event network-vif-plugged-960fe70c-58cb-4649-a282-0bbe4a7ae3d3 for instance with vm_state building and task_state spawning. [ 1604.837818] env[62619]: INFO nova.compute.manager [-] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Took 1.27 seconds to deallocate network for instance. [ 1604.847735] env[62619]: DEBUG nova.compute.manager [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1604.847735] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1604.848682] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3db74f53-47f5-4907-8732-24e733594812 tempest-ServerExternalEventsTest-1873159408 tempest-ServerExternalEventsTest-1873159408-project-member] Lock "fab3d689-9e30-4afd-b0cc-49c6d2870c50" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.839s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.850191] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b7f4e7-bd27-426e-8334-48ac76e54856 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.866317] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1604.870348] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e767329-3cc8-4e35-b160-1cd9cb550bf4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.880873] env[62619]: DEBUG oslo_vmware.api [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1604.880873] env[62619]: value = "task-1777855" [ 1604.880873] env[62619]: _type = "Task" [ 1604.880873] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.888758] env[62619]: DEBUG oslo_vmware.api [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777855, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.003464] env[62619]: DEBUG nova.network.neutron [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Successfully updated port: 960fe70c-58cb-4649-a282-0bbe4a7ae3d3 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1605.115732] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "a5e4c524-7cc8-4981-899e-1a7c80fac2bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.352s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.314609] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d48257-4746-4805-aad7-91af81818630 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.323267] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0db300b-02fd-4948-bdb2-139391c8aa19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.363400] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.364721] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d889a68d-fe11-4b18-952c-9d313b21f1c0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.372516] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21311213-d1ac-4ec1-b90a-d68bd51c1fc0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.387098] env[62619]: DEBUG nova.compute.provider_tree [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1605.396048] env[62619]: DEBUG oslo_vmware.api [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777855, 'name': PowerOffVM_Task, 'duration_secs': 0.389927} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.396954] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1605.397151] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1605.397394] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14108bf9-2825-40c4-948d-77b71afcb195 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.503847] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "refresh_cache-0f925028-c376-438f-8a56-deaa23047199" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1605.504045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired lock "refresh_cache-0f925028-c376-438f-8a56-deaa23047199" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1605.504407] env[62619]: DEBUG nova.network.neutron [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1605.711358] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1605.711611] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1605.711799] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleting the datastore file [datastore1] ec56c824-5f9a-47bf-bcd6-e456ddaad2f2 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1605.712084] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60ea09fb-5e3b-4315-927f-5f2c0db4a3b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.719277] env[62619]: DEBUG oslo_vmware.api 
[None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1605.719277] env[62619]: value = "task-1777857" [ 1605.719277] env[62619]: _type = "Task" [ 1605.719277] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.729180] env[62619]: DEBUG oslo_vmware.api [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777857, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.760116] env[62619]: DEBUG oslo_vmware.rw_handles [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ade7ea-e68d-8fd9-0f26-4a145928c99b/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1605.761046] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47117452-e73b-4aac-b429-57105cccc226 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.767268] env[62619]: DEBUG oslo_vmware.rw_handles [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ade7ea-e68d-8fd9-0f26-4a145928c99b/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1605.767435] env[62619]: ERROR oslo_vmware.rw_handles [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ade7ea-e68d-8fd9-0f26-4a145928c99b/disk-0.vmdk due to incomplete transfer. [ 1605.767652] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b2c6fe10-a6c4-4eda-b2b3-5d08689da9ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.774578] env[62619]: DEBUG oslo_vmware.rw_handles [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ade7ea-e68d-8fd9-0f26-4a145928c99b/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1605.774769] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Uploaded image b6c3d7df-6de8-4488-be06-95164f241d93 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1605.777225] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1605.777473] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ddddd5a1-bab6-4f09-ae63-b05a9a9e8f3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.783851] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1605.783851] env[62619]: value = "task-1777858" [ 1605.783851] env[62619]: _type = "Task" [ 1605.783851] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.791532] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777858, 'name': Destroy_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.892514] env[62619]: DEBUG nova.scheduler.client.report [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1606.046519] env[62619]: DEBUG nova.network.neutron [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1606.230990] env[62619]: DEBUG oslo_vmware.api [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777857, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.296229] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777858, 'name': Destroy_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.306707] env[62619]: DEBUG nova.network.neutron [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Updating instance_info_cache with network_info: [{"id": "960fe70c-58cb-4649-a282-0bbe4a7ae3d3", "address": "fa:16:3e:f1:97:de", "network": {"id": "0e501edc-55e3-4ded-9e7c-07e29223ba50", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1446821690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc6315c41fe451a94e28d68cab87b2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap960fe70c-58", "ovs_interfaceid": "960fe70c-58cb-4649-a282-0bbe4a7ae3d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1606.400415] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.090s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1606.401443] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.052s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1606.402095] env[62619]: DEBUG nova.objects.instance [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lazy-loading 'resources' on Instance uuid cd8b8828-79cf-4a7c-b018-b8bd745aaa45 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1606.428276] env[62619]: INFO nova.scheduler.client.report [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Deleted allocations for instance 1f86b805-0fde-4bda-9a94-d440a670e23c [ 1606.730317] env[62619]: DEBUG oslo_vmware.api 
[None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777857, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.528803} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.730610] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1606.730792] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1606.730967] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1606.731153] env[62619]: INFO nova.compute.manager [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Took 1.88 seconds to destroy the instance on the hypervisor. [ 1606.731391] env[62619]: DEBUG oslo.service.loopingcall [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1606.731589] env[62619]: DEBUG nova.compute.manager [-] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1606.731681] env[62619]: DEBUG nova.network.neutron [-] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1606.793996] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777858, 'name': Destroy_Task, 'duration_secs': 0.536108} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.794370] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Destroyed the VM [ 1606.794860] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1606.795171] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-477f4999-4db5-46de-a346-6b83018ca15c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.801667] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1606.801667] env[62619]: value = "task-1777859" [ 1606.801667] env[62619]: _type = "Task" [ 1606.801667] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.812152] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Releasing lock "refresh_cache-0f925028-c376-438f-8a56-deaa23047199" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1606.812393] env[62619]: DEBUG nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Instance network_info: |[{"id": "960fe70c-58cb-4649-a282-0bbe4a7ae3d3", "address": "fa:16:3e:f1:97:de", "network": {"id": "0e501edc-55e3-4ded-9e7c-07e29223ba50", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1446821690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc6315c41fe451a94e28d68cab87b2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap960fe70c-58", "ovs_interfaceid": "960fe70c-58cb-4649-a282-0bbe4a7ae3d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1606.812679] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 
tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777859, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.813580] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:97:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aaf1b231-3660-4453-b4f3-44d825b9a5dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '960fe70c-58cb-4649-a282-0bbe4a7ae3d3', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1606.826187] env[62619]: DEBUG oslo.service.loopingcall [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1606.827419] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f925028-c376-438f-8a56-deaa23047199] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1606.827526] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d2b1317-678a-4b78-a81d-f95f51d1d715 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.845618] env[62619]: DEBUG nova.compute.manager [req-9f36191d-db9f-4be1-a80c-df55b1741c15 req-8940f29f-0a41-4cb8-9dcc-b1cc2fa12c73 service nova] [instance: 0f925028-c376-438f-8a56-deaa23047199] Received event network-changed-960fe70c-58cb-4649-a282-0bbe4a7ae3d3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1606.845824] env[62619]: DEBUG nova.compute.manager [req-9f36191d-db9f-4be1-a80c-df55b1741c15 req-8940f29f-0a41-4cb8-9dcc-b1cc2fa12c73 service nova] [instance: 0f925028-c376-438f-8a56-deaa23047199] Refreshing instance network info cache due to event network-changed-960fe70c-58cb-4649-a282-0bbe4a7ae3d3. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1606.846049] env[62619]: DEBUG oslo_concurrency.lockutils [req-9f36191d-db9f-4be1-a80c-df55b1741c15 req-8940f29f-0a41-4cb8-9dcc-b1cc2fa12c73 service nova] Acquiring lock "refresh_cache-0f925028-c376-438f-8a56-deaa23047199" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.846223] env[62619]: DEBUG oslo_concurrency.lockutils [req-9f36191d-db9f-4be1-a80c-df55b1741c15 req-8940f29f-0a41-4cb8-9dcc-b1cc2fa12c73 service nova] Acquired lock "refresh_cache-0f925028-c376-438f-8a56-deaa23047199" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.846383] env[62619]: DEBUG nova.network.neutron [req-9f36191d-db9f-4be1-a80c-df55b1741c15 req-8940f29f-0a41-4cb8-9dcc-b1cc2fa12c73 service nova] [instance: 0f925028-c376-438f-8a56-deaa23047199] Refreshing network info cache for port 960fe70c-58cb-4649-a282-0bbe4a7ae3d3 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1606.852924] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1606.852924] env[62619]: value = "task-1777860" [ 1606.852924] env[62619]: _type = "Task" [ 1606.852924] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.861444] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777860, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.937844] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2ac53dc-4fd0-4929-a3a2-7ddc8f12d17a tempest-ServersTestMultiNic-518494389 tempest-ServersTestMultiNic-518494389-project-member] Lock "1f86b805-0fde-4bda-9a94-d440a670e23c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.005s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1607.311931] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777859, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.314804] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6793a0c-789c-46a1-9cdf-b29d155dc8f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.327188] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135a8dbf-a014-4e46-b758-cfd2f5714ca4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.362170] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf23fa5-b14c-4743-9600-643a6c728e94 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.376624] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777860, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.377981] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9b0c6e-88c2-45a2-a69a-715ebda6c487 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.393604] env[62619]: DEBUG nova.compute.provider_tree [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1607.582025] env[62619]: DEBUG nova.network.neutron [-] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.719091] env[62619]: DEBUG nova.network.neutron [req-9f36191d-db9f-4be1-a80c-df55b1741c15 req-8940f29f-0a41-4cb8-9dcc-b1cc2fa12c73 service nova] [instance: 0f925028-c376-438f-8a56-deaa23047199] Updated VIF entry in instance network info cache for port 960fe70c-58cb-4649-a282-0bbe4a7ae3d3. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1607.719504] env[62619]: DEBUG nova.network.neutron [req-9f36191d-db9f-4be1-a80c-df55b1741c15 req-8940f29f-0a41-4cb8-9dcc-b1cc2fa12c73 service nova] [instance: 0f925028-c376-438f-8a56-deaa23047199] Updating instance_info_cache with network_info: [{"id": "960fe70c-58cb-4649-a282-0bbe4a7ae3d3", "address": "fa:16:3e:f1:97:de", "network": {"id": "0e501edc-55e3-4ded-9e7c-07e29223ba50", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1446821690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc6315c41fe451a94e28d68cab87b2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap960fe70c-58", "ovs_interfaceid": "960fe70c-58cb-4649-a282-0bbe4a7ae3d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.812784] env[62619]: DEBUG oslo_vmware.api [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777859, 'name': RemoveSnapshot_Task, 'duration_secs': 0.848845} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.813069] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1607.813296] env[62619]: INFO nova.compute.manager [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Took 22.29 seconds to snapshot the instance on the hypervisor. [ 1607.868380] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777860, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.896842] env[62619]: DEBUG nova.scheduler.client.report [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1608.085560] env[62619]: INFO nova.compute.manager [-] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Took 1.35 seconds to deallocate network for instance. [ 1608.222013] env[62619]: DEBUG oslo_concurrency.lockutils [req-9f36191d-db9f-4be1-a80c-df55b1741c15 req-8940f29f-0a41-4cb8-9dcc-b1cc2fa12c73 service nova] Releasing lock "refresh_cache-0f925028-c376-438f-8a56-deaa23047199" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1608.359530] env[62619]: DEBUG nova.compute.manager [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Found 3 images (rotation: 2) {{(pid=62619) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4924}} [ 1608.359750] env[62619]: DEBUG nova.compute.manager [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Rotating out 1 backups {{(pid=62619) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4932}} [ 1608.359884] env[62619]: DEBUG nova.compute.manager [None req-52483295-b9b0-492c-8ddc-2638991f33a8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Deleting image cce1f626-0c24-4d2e-b753-64401418a36e {{(pid=62619) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4937}} [ 1608.379466] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777860, 'name': CreateVM_Task, 'duration_secs': 1.391818} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.379466] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f925028-c376-438f-8a56-deaa23047199] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1608.383719] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.383719] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.384423] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1608.384483] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a40d193-fa5f-4624-86a0-3bca55db4aee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.390678] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1608.390678] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5233291d-efbf-8f39-375f-bf93a3caa8e4" [ 1608.390678] env[62619]: _type = "Task" [ 1608.390678] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.404842] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.003s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.406868] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5233291d-efbf-8f39-375f-bf93a3caa8e4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.407489] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.645s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.409268] env[62619]: INFO nova.compute.claims [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1608.439432] env[62619]: INFO nova.scheduler.client.report [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleted allocations for instance cd8b8828-79cf-4a7c-b018-b8bd745aaa45 [ 1608.592668] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.903045] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5233291d-efbf-8f39-375f-bf93a3caa8e4, 'name': SearchDatastore_Task, 'duration_secs': 0.020283} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.903352] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1608.903577] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1608.903819] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.903933] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.905844] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1608.905844] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-232e5873-b6a4-487e-a0d2-04a4f28643fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.918539] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1608.921322] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1608.921322] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e1f2dfb-9b0b-4be7-a5b2-7f2b13faa6e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.925009] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1608.925009] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a5ca7e-0d56-0134-50dc-0150cd977194" [ 1608.925009] env[62619]: _type = "Task" [ 1608.925009] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.939573] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a5ca7e-0d56-0134-50dc-0150cd977194, 'name': SearchDatastore_Task, 'duration_secs': 0.009484} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.939573] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0866ea22-7134-479d-b07f-b16fc3871936 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.953643] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1608.953643] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5226a00c-7a20-6247-10fa-c421f06c11ce" [ 1608.953643] env[62619]: _type = "Task" [ 1608.953643] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.953643] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de336900-918d-4c34-b175-73df671733b5 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "cd8b8828-79cf-4a7c-b018-b8bd745aaa45" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.695s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.967773] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5226a00c-7a20-6247-10fa-c421f06c11ce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.184854] env[62619]: DEBUG nova.compute.manager [req-9601c6ec-0a84-4d51-a44d-a23e8ab4e8ea req-ee5fbbdc-084a-488c-9e49-639dc61db504 service nova] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Received event network-vif-deleted-2e09e8e9-5dd5-425b-9bed-ed1f14f42a12 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1609.214191] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "b6aae13f-0711-4421-9d55-de7ece3e4b89" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1609.214435] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "b6aae13f-0711-4421-9d55-de7ece3e4b89" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1609.214663] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "b6aae13f-0711-4421-9d55-de7ece3e4b89-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1609.214841] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "b6aae13f-0711-4421-9d55-de7ece3e4b89-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1609.215649] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "b6aae13f-0711-4421-9d55-de7ece3e4b89-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1609.221147] env[62619]: INFO nova.compute.manager [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Terminating instance [ 1609.467973] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5226a00c-7a20-6247-10fa-c421f06c11ce, 'name': SearchDatastore_Task, 'duration_secs': 0.01739} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.471772] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.471772] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 0f925028-c376-438f-8a56-deaa23047199/0f925028-c376-438f-8a56-deaa23047199.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1609.471772] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d934e10-d1b0-4552-9e16-235c0ab7dd0f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.481011] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1609.481011] env[62619]: value = "task-1777861" [ 1609.481011] env[62619]: _type = "Task" [ 1609.481011] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.494724] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777861, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.726041] env[62619]: DEBUG nova.compute.manager [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1609.726041] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1609.729311] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580591bb-ed9e-48e1-9902-6bcbe21beec8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.740740] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1609.741023] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5fbf87cf-4952-4c7b-8681-3286b01b22ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.752319] env[62619]: DEBUG oslo_vmware.api [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1609.752319] env[62619]: value = "task-1777862" [ 1609.752319] env[62619]: _type = "Task" [ 1609.752319] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.771783] env[62619]: DEBUG oslo_vmware.api [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777862, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.998446] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777861, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.004285] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad02028-534a-4b6a-9611-f63bd54e424b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.020028] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db9a468e-38ed-43d2-a07c-da71aff19d08 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.060848] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e331348-616c-42df-b947-eabc09543940 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.070733] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41f33b7-78e7-4138-9cdd-da685d2d2460 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.087725] env[62619]: DEBUG nova.compute.provider_tree [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1610.096276] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "1257a23e-3beb-4357-9322-4b84c87d0c35" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1610.096276] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "1257a23e-3beb-4357-9322-4b84c87d0c35" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.265506] env[62619]: DEBUG oslo_vmware.api [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777862, 'name': PowerOffVM_Task, 'duration_secs': 0.305356} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.265506] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1610.265506] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1610.265506] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30de1db8-ad77-435e-b889-c1dbfaab3340 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.339188] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1610.339443] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1610.339665] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleting the datastore file [datastore1] b6aae13f-0711-4421-9d55-de7ece3e4b89 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1610.339971] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59567336-64e9-4d34-965b-99ebb17497d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.346700] env[62619]: DEBUG oslo_vmware.api [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1610.346700] env[62619]: value = "task-1777864" [ 1610.346700] env[62619]: _type = "Task" [ 1610.346700] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.354409] env[62619]: DEBUG oslo_vmware.api [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777864, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.493721] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777861, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.616802} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.494024] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 0f925028-c376-438f-8a56-deaa23047199/0f925028-c376-438f-8a56-deaa23047199.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1610.494239] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1610.494643] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cdfa485e-2dd5-4e83-bfc8-8afed72d4631 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.501835] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1610.501835] env[62619]: value = "task-1777865" [ 1610.501835] env[62619]: _type = "Task" [ 1610.501835] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.512565] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777865, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.591811] env[62619]: DEBUG nova.scheduler.client.report [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1610.598571] env[62619]: DEBUG nova.compute.manager [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1610.857596] env[62619]: DEBUG oslo_vmware.api [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.462596} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.857971] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1610.858180] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1610.858357] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1610.858523] env[62619]: INFO nova.compute.manager [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1610.858757] env[62619]: DEBUG oslo.service.loopingcall [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1610.858941] env[62619]: DEBUG nova.compute.manager [-] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1610.859129] env[62619]: DEBUG nova.network.neutron [-] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1611.011230] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777865, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068731} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.011498] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1611.012292] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77426810-c6a8-4b9d-9716-7b5b4e6dfa2c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.037044] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 0f925028-c376-438f-8a56-deaa23047199/0f925028-c376-438f-8a56-deaa23047199.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1611.037359] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-302a1b98-a361-474a-b969-357f069bdfcc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.061114] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1611.061114] env[62619]: value = "task-1777866" [ 1611.061114] env[62619]: _type = "Task" [ 1611.061114] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.067178] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777866, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.099023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.689s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.099023] env[62619]: DEBUG nova.compute.manager [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1611.099716] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.332s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.100055] env[62619]: DEBUG nova.objects.instance [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Lazy-loading 'resources' on Instance uuid ed34ae20-a891-45aa-8124-f36f264937f8 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1611.123547] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.355014] env[62619]: DEBUG nova.compute.manager [req-54abf860-83c2-4603-bf51-a22450d010b1 req-22b8e188-509e-4398-9e57-f12f54945302 service nova] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Received event network-vif-deleted-a7a7cbea-2a80-4996-a6d0-8a345c83069f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1611.355236] env[62619]: INFO nova.compute.manager [req-54abf860-83c2-4603-bf51-a22450d010b1 req-22b8e188-509e-4398-9e57-f12f54945302 service nova] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Neutron deleted interface a7a7cbea-2a80-4996-a6d0-8a345c83069f; detaching it from the instance and deleting it from the info cache [ 1611.355415] env[62619]: DEBUG nova.network.neutron [req-54abf860-83c2-4603-bf51-a22450d010b1 req-22b8e188-509e-4398-9e57-f12f54945302 service nova] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1611.569643] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777866, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.604775] env[62619]: DEBUG nova.compute.utils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1611.606222] env[62619]: DEBUG nova.compute.manager [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1611.606384] env[62619]: DEBUG nova.network.neutron [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1611.654355] env[62619]: DEBUG nova.policy [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b30d7e2e4a9447e8b2caa6f7fc30aa65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a8f5f9386ba4dfa869c288a30aaeada', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1611.805847] env[62619]: DEBUG nova.network.neutron [-] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1611.859444] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-24050f39-e1c1-4c8f-8d25-242675a0b80b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.876353] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74729004-88fc-4c62-b33d-5dcdc6f2bef3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.916697] env[62619]: DEBUG nova.compute.manager [req-54abf860-83c2-4603-bf51-a22450d010b1 req-22b8e188-509e-4398-9e57-f12f54945302 service nova] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Detach interface failed, port_id=a7a7cbea-2a80-4996-a6d0-8a345c83069f, reason: Instance b6aae13f-0711-4421-9d55-de7ece3e4b89 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1612.065737] env[62619]: DEBUG nova.network.neutron [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Successfully created port: 89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1612.073112] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777866, 'name': ReconfigVM_Task, 'duration_secs': 0.90711} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.074274] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 0f925028-c376-438f-8a56-deaa23047199/0f925028-c376-438f-8a56-deaa23047199.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1612.076070] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c227882a-8923-4fd3-bc51-d3ac772c64ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.078826] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ad361f67-2a01-4da6-92de-3040fa4ba696 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.086679] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d263be71-b069-45d2-b448-7f085be7ef4f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.091765] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1612.091765] env[62619]: value = "task-1777867" [ 1612.091765] env[62619]: _type = "Task" [ 1612.091765] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.122860] env[62619]: DEBUG nova.compute.manager [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1612.133495] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639373e0-797b-4ee8-9d1d-8b95a2ee6feb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.140722] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777867, 'name': Rename_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.143771] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9da53c47-6f73-41b4-a90a-ad6c5345578f tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.143998] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9da53c47-6f73-41b4-a90a-ad6c5345578f tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.144208] env[62619]: DEBUG nova.compute.manager [None req-9da53c47-6f73-41b4-a90a-ad6c5345578f tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1612.147191] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c7ebd4-a337-4f96-976b-0f41513fcd2b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.151540] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1c022e-1689-4d3b-916d-01a922089404 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.159655] env[62619]: DEBUG nova.compute.manager [None req-9da53c47-6f73-41b4-a90a-ad6c5345578f tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1612.160346] env[62619]: DEBUG nova.objects.instance [None req-9da53c47-6f73-41b4-a90a-ad6c5345578f tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'flavor' on Instance uuid da806d3f-79f0-4188-a2d8-0beeb9dfec1a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1612.171740] env[62619]: DEBUG nova.compute.provider_tree [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1612.312076] env[62619]: INFO nova.compute.manager [-] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Took 1.45 seconds to deallocate network for instance. [ 1612.603614] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777867, 'name': Rename_Task, 'duration_secs': 0.280752} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.604213] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1612.604502] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c42f13d-b6fb-4424-95b8-bae3ab65b4d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.611535] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1612.611535] env[62619]: value = "task-1777868" [ 1612.611535] env[62619]: _type = "Task" [ 1612.611535] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.620855] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777868, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.683822] env[62619]: DEBUG nova.scheduler.client.report [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1612.819490] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.121648] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777868, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.141097] env[62619]: DEBUG nova.compute.manager [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1613.166440] env[62619]: DEBUG nova.virt.hardware [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1613.166674] env[62619]: DEBUG nova.virt.hardware [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1613.167278] env[62619]: DEBUG nova.virt.hardware [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1613.167278] env[62619]: DEBUG nova.virt.hardware [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1613.167278] env[62619]: DEBUG nova.virt.hardware [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1613.167656] env[62619]: DEBUG nova.virt.hardware [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1613.167656] env[62619]: DEBUG nova.virt.hardware [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1613.167743] env[62619]: DEBUG nova.virt.hardware [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
[ 1613.167976] env[62619]: DEBUG nova.virt.hardware [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1613.168030] env[62619]: DEBUG nova.virt.hardware [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1613.168171] env[62619]: DEBUG nova.virt.hardware [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1613.169034] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b177120-7600-4c1a-8317-c8c194fa45ed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.177152] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda7aba5-b49a-4d6f-99d5-0b484a1326fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.190749] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9da53c47-6f73-41b4-a90a-ad6c5345578f tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1613.191484] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.092s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.193786] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-827580a4-a405-4a87-bb8c-4300205bcecf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.195470] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.721s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1613.196987] env[62619]: INFO nova.compute.claims [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1613.208218] env[62619]: DEBUG oslo_vmware.api [None req-9da53c47-6f73-41b4-a90a-ad6c5345578f tempest-ServerActionsTestOtherB-723624610 
tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1613.208218] env[62619]: value = "task-1777869" [ 1613.208218] env[62619]: _type = "Task" [ 1613.208218] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.217544] env[62619]: DEBUG oslo_vmware.api [None req-9da53c47-6f73-41b4-a90a-ad6c5345578f tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777869, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.226814] env[62619]: INFO nova.scheduler.client.report [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Deleted allocations for instance ed34ae20-a891-45aa-8124-f36f264937f8 [ 1613.622765] env[62619]: DEBUG oslo_vmware.api [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777868, 'name': PowerOnVM_Task, 'duration_secs': 0.768298} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.623556] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1613.623928] env[62619]: INFO nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Took 9.35 seconds to spawn the instance on the hypervisor. [ 1613.624252] env[62619]: DEBUG nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1613.625170] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0ffc7e-66c8-48a0-9eb9-695e371c6dbd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.724020] env[62619]: DEBUG oslo_vmware.api [None req-9da53c47-6f73-41b4-a90a-ad6c5345578f tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777869, 'name': PowerOffVM_Task, 'duration_secs': 0.268726} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.724020] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9da53c47-6f73-41b4-a90a-ad6c5345578f tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1613.724020] env[62619]: DEBUG nova.compute.manager [None req-9da53c47-6f73-41b4-a90a-ad6c5345578f tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1613.724020] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98976b39-c8db-43af-9cbd-e52395e10334 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.735649] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7c089132-bc2e-4f8e-b817-a36a378df1d2 tempest-VolumesAssistedSnapshotsTest-1211361299 tempest-VolumesAssistedSnapshotsTest-1211361299-project-member] Lock "ed34ae20-a891-45aa-8124-f36f264937f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.360s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.823566] env[62619]: DEBUG nova.compute.manager [req-f43f5626-d8f8-4aaf-8fe5-b3db7d9f20fe req-7ef76def-d3cd-4239-ac86-9f84bb0fa66c service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Received event network-vif-plugged-89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1613.823777] env[62619]: DEBUG oslo_concurrency.lockutils [req-f43f5626-d8f8-4aaf-8fe5-b3db7d9f20fe req-7ef76def-d3cd-4239-ac86-9f84bb0fa66c service nova] Acquiring lock "5cf7ca57-351f-48ab-8758-b30f50cd607f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.823988] env[62619]: DEBUG oslo_concurrency.lockutils [req-f43f5626-d8f8-4aaf-8fe5-b3db7d9f20fe req-7ef76def-d3cd-4239-ac86-9f84bb0fa66c service nova] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1613.824167] env[62619]: DEBUG oslo_concurrency.lockutils [req-f43f5626-d8f8-4aaf-8fe5-b3db7d9f20fe req-7ef76def-d3cd-4239-ac86-9f84bb0fa66c service nova] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.824327] env[62619]: DEBUG nova.compute.manager [req-f43f5626-d8f8-4aaf-8fe5-b3db7d9f20fe req-7ef76def-d3cd-4239-ac86-9f84bb0fa66c service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] No waiting events found dispatching network-vif-plugged-89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1613.824552] env[62619]: WARNING 
nova.compute.manager [req-f43f5626-d8f8-4aaf-8fe5-b3db7d9f20fe req-7ef76def-d3cd-4239-ac86-9f84bb0fa66c service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Received unexpected event network-vif-plugged-89e91bb9-2bd5-4385-b3dd-cee4612bb166 for instance with vm_state building and task_state spawning. [ 1613.866387] env[62619]: DEBUG nova.network.neutron [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Successfully updated port: 89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1613.891511] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Acquiring lock "d0258646-e687-4198-b7c8-7bd116e3bf18" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.891895] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Lock "d0258646-e687-4198-b7c8-7bd116e3bf18" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.152095] env[62619]: INFO nova.compute.manager [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Took 46.97 seconds to build instance. 
[ 1614.240182] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9da53c47-6f73-41b4-a90a-ad6c5345578f tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.096s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.370817] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1614.370817] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.370817] env[62619]: DEBUG nova.network.neutron [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1614.394934] env[62619]: DEBUG nova.compute.manager [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1614.655424] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8c6cd12a-2292-4f8a-9949-d1d9b2e8b0b7 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "0f925028-c376-438f-8a56-deaa23047199" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.841s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.662237] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9d6d8a-3757-4421-9392-3880015e8e70 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.673176] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55db6686-0502-44cf-9bcd-bfa7d367ba06 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.706826] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d00ad8-41d4-4cbc-85fd-16c2b5aa7010 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.715251] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95ca887-c785-4191-9bc9-5669b22622d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.730993] env[62619]: DEBUG nova.compute.provider_tree [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1614.926846] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.942611] env[62619]: DEBUG nova.network.neutron [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1615.233983] env[62619]: DEBUG nova.scheduler.client.report [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1615.258097] env[62619]: DEBUG nova.network.neutron [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Updating instance_info_cache with network_info: [{"id": "89e91bb9-2bd5-4385-b3dd-cee4612bb166", "address": "fa:16:3e:78:49:c7", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e91bb9-2b", "ovs_interfaceid": "89e91bb9-2bd5-4385-b3dd-cee4612bb166", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1615.451413] env[62619]: DEBUG nova.compute.manager [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Stashing vm_state: stopped {{(pid=62619) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1615.597794] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "a5e4c524-7cc8-4981-899e-1a7c80fac2bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.598062] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "a5e4c524-7cc8-4981-899e-1a7c80fac2bd" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.598273] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "a5e4c524-7cc8-4981-899e-1a7c80fac2bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.598451] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "a5e4c524-7cc8-4981-899e-1a7c80fac2bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.598616] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "a5e4c524-7cc8-4981-899e-1a7c80fac2bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.601848] env[62619]: INFO nova.compute.manager [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Terminating instance [ 1615.739411] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.544s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.739937] env[62619]: DEBUG nova.compute.manager [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1615.745681] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.401s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.747586] env[62619]: INFO nova.compute.claims [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1615.760817] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1615.761121] env[62619]: DEBUG nova.compute.manager [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Instance network_info: |[{"id": "89e91bb9-2bd5-4385-b3dd-cee4612bb166", "address": "fa:16:3e:78:49:c7", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e91bb9-2b", "ovs_interfaceid": "89e91bb9-2bd5-4385-b3dd-cee4612bb166", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1615.761513] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:49:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89e91bb9-2bd5-4385-b3dd-cee4612bb166', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1615.768938] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Creating folder: Project (0a8f5f9386ba4dfa869c288a30aaeada). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1615.769204] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a3f8892-ef59-4b27-a382-fb95a11c4e85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.788481] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "0f925028-c376-438f-8a56-deaa23047199" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.788481] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "0f925028-c376-438f-8a56-deaa23047199" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.788481] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "0f925028-c376-438f-8a56-deaa23047199-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.788734] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "0f925028-c376-438f-8a56-deaa23047199-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.788734] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "0f925028-c376-438f-8a56-deaa23047199-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.792824] env[62619]: INFO nova.compute.manager [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Terminating instance [ 1615.841911] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Created folder: Project (0a8f5f9386ba4dfa869c288a30aaeada) in parent group-v368875. 
[ 1615.842255] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Creating folder: Instances. Parent ref: group-v369062. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1615.842392] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc124a42-246a-4dd6-92a9-cb4d207d5dbb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.853053] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Created folder: Instances in parent group-v369062. [ 1615.853589] env[62619]: DEBUG oslo.service.loopingcall [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1615.854406] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1615.854704] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c050659-85da-4c44-b955-5c6f0dade47e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.877212] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1615.877212] env[62619]: value = "task-1777872" [ 1615.877212] env[62619]: _type = "Task" [ 1615.877212] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.886595] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777872, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.978957] env[62619]: DEBUG nova.compute.manager [req-dae93319-2b46-4757-a0a9-ae20212e74d3 req-6c6be8f0-253b-4447-8fd8-091280b98068 service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Received event network-changed-89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1615.979515] env[62619]: DEBUG nova.compute.manager [req-dae93319-2b46-4757-a0a9-ae20212e74d3 req-6c6be8f0-253b-4447-8fd8-091280b98068 service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Refreshing instance network info cache due to event network-changed-89e91bb9-2bd5-4385-b3dd-cee4612bb166. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1615.979819] env[62619]: DEBUG oslo_concurrency.lockutils [req-dae93319-2b46-4757-a0a9-ae20212e74d3 req-6c6be8f0-253b-4447-8fd8-091280b98068 service nova] Acquiring lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1615.979983] env[62619]: DEBUG oslo_concurrency.lockutils [req-dae93319-2b46-4757-a0a9-ae20212e74d3 req-6c6be8f0-253b-4447-8fd8-091280b98068 service nova] Acquired lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1615.980363] env[62619]: DEBUG nova.network.neutron [req-dae93319-2b46-4757-a0a9-ae20212e74d3 req-6c6be8f0-253b-4447-8fd8-091280b98068 service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Refreshing network info cache for port 89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1615.982383] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1616.110460] env[62619]: DEBUG nova.compute.manager [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1616.113531] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1616.113531] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83088429-f0dd-42a7-8986-e8262031ee84 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.122079] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1616.122356] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8321ad9-421f-4c12-80b1-811790343f5a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.131499] env[62619]: DEBUG oslo_vmware.api [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1616.131499] env[62619]: value = "task-1777873" [ 1616.131499] env[62619]: _type = "Task" [ 1616.131499] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.140507] env[62619]: DEBUG oslo_vmware.api [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777873, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.256539] env[62619]: DEBUG nova.compute.utils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1616.260889] env[62619]: DEBUG nova.compute.manager [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1616.261083] env[62619]: DEBUG nova.network.neutron [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1616.297038] env[62619]: DEBUG nova.compute.manager [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1616.297038] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1616.297468] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f74bc29-699b-47b7-a2b3-2c9e36139ec9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.305558] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1616.306187] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7a384ec-b6fb-4d53-8ffa-091a808ee64f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.313329] env[62619]: DEBUG oslo_vmware.api [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1616.313329] env[62619]: value = "task-1777874" [ 1616.313329] env[62619]: _type = "Task" [ 1616.313329] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.321601] env[62619]: DEBUG oslo_vmware.api [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777874, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.345523] env[62619]: DEBUG nova.policy [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6beb0577d52454a82262ea46ab3e796', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf557954d79d4bb1939f6e65d4ed00b5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1616.392019] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777872, 'name': CreateVM_Task, 'duration_secs': 0.41248} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.392019] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1616.392019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.392019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.392019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1616.392019] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12609484-c282-4bbf-89df-cf5048ab74c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.396435] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1616.396435] env[62619]: value = 
"session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e7349d-e42c-050d-8fab-f315d6e9954d" [ 1616.396435] env[62619]: _type = "Task" [ 1616.396435] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.407159] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e7349d-e42c-050d-8fab-f315d6e9954d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.642343] env[62619]: DEBUG oslo_vmware.api [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777873, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.749042] env[62619]: DEBUG nova.network.neutron [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Successfully created port: 4e5b302f-7298-4265-84e1-3839343e7d8c {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1616.768900] env[62619]: DEBUG nova.compute.manager [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1616.808677] env[62619]: DEBUG nova.network.neutron [req-dae93319-2b46-4757-a0a9-ae20212e74d3 req-6c6be8f0-253b-4447-8fd8-091280b98068 service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Updated VIF entry in instance network info cache for port 89e91bb9-2bd5-4385-b3dd-cee4612bb166. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1616.809708] env[62619]: DEBUG nova.network.neutron [req-dae93319-2b46-4757-a0a9-ae20212e74d3 req-6c6be8f0-253b-4447-8fd8-091280b98068 service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Updating instance_info_cache with network_info: [{"id": "89e91bb9-2bd5-4385-b3dd-cee4612bb166", "address": "fa:16:3e:78:49:c7", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e91bb9-2b", "ovs_interfaceid": "89e91bb9-2bd5-4385-b3dd-cee4612bb166", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.829492] env[62619]: DEBUG oslo_vmware.api [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777874, 'name': PowerOffVM_Task, 'duration_secs': 0.339696} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.829915] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1616.829987] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1616.830373] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84379295-4762-4bb6-9ba7-e2359166044c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.911778] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e7349d-e42c-050d-8fab-f315d6e9954d, 'name': SearchDatastore_Task, 'duration_secs': 0.033834} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.911778] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.911994] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1616.912207] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.915300] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.915300] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1616.915300] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-565b1192-0579-453e-b752-4e1affcdd7ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.925080] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1616.929035] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1616.929035] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d345b49-5782-4aa1-b550-ab11d2ffdb17 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.930534] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1616.931127] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1616.931127] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Deleting the datastore file [datastore1] 0f925028-c376-438f-8a56-deaa23047199 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1616.931998] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-224ca961-572e-4610-b0bb-5153471a94d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.934181] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1616.934181] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5258e8e3-6258-df05-1e77-a2cecdf04a22" [ 1616.934181] env[62619]: _type = "Task" [ 1616.934181] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.942178] env[62619]: DEBUG oslo_vmware.api [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1616.942178] env[62619]: value = "task-1777876" [ 1616.942178] env[62619]: _type = "Task" [ 1616.942178] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.949443] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5258e8e3-6258-df05-1e77-a2cecdf04a22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.957560] env[62619]: DEBUG oslo_vmware.api [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777876, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.142464] env[62619]: DEBUG oslo_vmware.api [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777873, 'name': PowerOffVM_Task, 'duration_secs': 0.514449} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.142707] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1617.142870] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1617.145376] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-389ff96e-28ec-48b3-bc7b-931a04d164dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.265769] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1617.266058] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1617.266225] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Deleting the datastore file [datastore1] a5e4c524-7cc8-4981-899e-1a7c80fac2bd {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1617.266484] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53dc61b6-2fc7-4400-b70e-a25c62df6412 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.276656] env[62619]: DEBUG oslo_vmware.api [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for the task: (returnval){ [ 1617.276656] env[62619]: value = "task-1777878" [ 1617.276656] env[62619]: _type = "Task" [ 1617.276656] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.288517] env[62619]: DEBUG oslo_vmware.api [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777878, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.303183] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73dabbd-b97f-48a4-97df-af1c82cf513a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.316487] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4639c51-32cb-44ef-a8f7-924e261eb2df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.317385] env[62619]: DEBUG oslo_concurrency.lockutils [req-dae93319-2b46-4757-a0a9-ae20212e74d3 req-6c6be8f0-253b-4447-8fd8-091280b98068 service nova] Releasing lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.348789] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c83fdd0-b683-4e1b-9ef2-a88d58346043 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.357206] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef2ea52-afdd-4bd2-aaac-193e26691fbb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.371422] env[62619]: DEBUG nova.compute.provider_tree [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1617.450170] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5258e8e3-6258-df05-1e77-a2cecdf04a22, 'name': SearchDatastore_Task, 'duration_secs': 0.020367} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.451313] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8992a2a-0a19-42de-aa06-90ea8ff1754b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.456225] env[62619]: DEBUG oslo_vmware.api [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777876, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.375938} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.459025] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1617.459025] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1617.459025] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1617.459025] env[62619]: INFO nova.compute.manager [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: 0f925028-c376-438f-8a56-deaa23047199] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1617.459025] env[62619]: DEBUG oslo.service.loopingcall [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1617.459025] env[62619]: DEBUG nova.compute.manager [-] [instance: 0f925028-c376-438f-8a56-deaa23047199] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1617.459025] env[62619]: DEBUG nova.network.neutron [-] [instance: 0f925028-c376-438f-8a56-deaa23047199] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1617.460558] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1617.460558] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5242fa4c-c0b6-6688-79be-edfbcdbe9ccc" [ 1617.460558] env[62619]: _type = "Task" [ 1617.460558] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.468460] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5242fa4c-c0b6-6688-79be-edfbcdbe9ccc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.781033] env[62619]: DEBUG nova.compute.manager [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1617.792095] env[62619]: DEBUG oslo_vmware.api [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777878, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.809443] env[62619]: DEBUG nova.virt.hardware [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1617.810462] env[62619]: DEBUG nova.virt.hardware [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1617.810462] env[62619]: DEBUG nova.virt.hardware [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1617.810462] env[62619]: DEBUG nova.virt.hardware [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1617.810844] env[62619]: DEBUG nova.virt.hardware [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1617.811218] env[62619]: DEBUG nova.virt.hardware [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
1617.811793] env[62619]: DEBUG nova.virt.hardware [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1617.812949] env[62619]: DEBUG nova.virt.hardware [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1617.812949] env[62619]: DEBUG nova.virt.hardware [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1617.812949] env[62619]: DEBUG nova.virt.hardware [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1617.813493] env[62619]: DEBUG nova.virt.hardware [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1617.815061] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8caad9c4-2808-4da2-976d-1df8b7a6b15d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.825539] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86ef5a0-701b-49ff-a00e-2dca869208ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.877123] env[62619]: DEBUG nova.scheduler.client.report [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1617.904225] env[62619]: DEBUG nova.compute.manager [req-a05ad63f-5613-400c-94ba-41eedffe8588 req-16a84ea2-2878-48eb-9c66-8c7eab26ba48 service nova] [instance: 0f925028-c376-438f-8a56-deaa23047199] Received event network-vif-deleted-960fe70c-58cb-4649-a282-0bbe4a7ae3d3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1617.904529] env[62619]: INFO nova.compute.manager [req-a05ad63f-5613-400c-94ba-41eedffe8588 
req-16a84ea2-2878-48eb-9c66-8c7eab26ba48 service nova] [instance: 0f925028-c376-438f-8a56-deaa23047199] Neutron deleted interface 960fe70c-58cb-4649-a282-0bbe4a7ae3d3; detaching it from the instance and deleting it from the info cache [ 1617.904883] env[62619]: DEBUG nova.network.neutron [req-a05ad63f-5613-400c-94ba-41eedffe8588 req-16a84ea2-2878-48eb-9c66-8c7eab26ba48 service nova] [instance: 0f925028-c376-438f-8a56-deaa23047199] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1617.974302] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5242fa4c-c0b6-6688-79be-edfbcdbe9ccc, 'name': SearchDatastore_Task, 'duration_secs': 0.009923} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.974302] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.974302] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 5cf7ca57-351f-48ab-8758-b30f50cd607f/5cf7ca57-351f-48ab-8758-b30f50cd607f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1617.974593] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79dfc5e8-f410-4407-94a1-ed27d5b1ae38 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.980740] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1617.980740] env[62619]: value = "task-1777879" [ 1617.980740] env[62619]: _type = "Task" [ 1617.980740] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.988690] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1777879, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.298264] env[62619]: DEBUG oslo_vmware.api [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Task: {'id': task-1777878, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.577132} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.298264] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1618.298264] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1618.298264] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1618.298264] env[62619]: INFO nova.compute.manager [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Took 2.19 seconds to destroy the instance on the hypervisor. [ 1618.298264] env[62619]: DEBUG oslo.service.loopingcall [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1618.298264] env[62619]: DEBUG nova.compute.manager [-] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1618.298264] env[62619]: DEBUG nova.network.neutron [-] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1618.373896] env[62619]: DEBUG nova.network.neutron [-] [instance: 0f925028-c376-438f-8a56-deaa23047199] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.385818] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.640s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.385818] env[62619]: DEBUG nova.compute.manager [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1618.390996] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 38.377s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.390996] env[62619]: DEBUG nova.objects.instance [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1618.409449] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63002bf9-28da-49ed-bdf4-18713c361738 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.421782] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8d77f8-1699-4db0-b43c-8da052b97ef7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.457029] env[62619]: DEBUG nova.compute.manager [req-a05ad63f-5613-400c-94ba-41eedffe8588 req-16a84ea2-2878-48eb-9c66-8c7eab26ba48 service nova] [instance: 0f925028-c376-438f-8a56-deaa23047199] Detach interface failed, port_id=960fe70c-58cb-4649-a282-0bbe4a7ae3d3, reason: Instance 0f925028-c376-438f-8a56-deaa23047199 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1618.480541] env[62619]: DEBUG nova.compute.manager [req-67b0cfb2-3f0c-4ddf-b2ba-214018208ba5 req-f8756472-31bb-4afb-af33-2ecc38dd532f service nova] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Received event network-vif-plugged-4e5b302f-7298-4265-84e1-3839343e7d8c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1618.480763] env[62619]: DEBUG oslo_concurrency.lockutils [req-67b0cfb2-3f0c-4ddf-b2ba-214018208ba5 req-f8756472-31bb-4afb-af33-2ecc38dd532f service nova] Acquiring lock "769905db-d19a-411f-bb5d-8196056b82aa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.482022] env[62619]: DEBUG oslo_concurrency.lockutils [req-67b0cfb2-3f0c-4ddf-b2ba-214018208ba5 req-f8756472-31bb-4afb-af33-2ecc38dd532f service nova] Lock "769905db-d19a-411f-bb5d-8196056b82aa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.482022] env[62619]: DEBUG oslo_concurrency.lockutils [req-67b0cfb2-3f0c-4ddf-b2ba-214018208ba5 req-f8756472-31bb-4afb-af33-2ecc38dd532f service nova] Lock "769905db-d19a-411f-bb5d-8196056b82aa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.482022] env[62619]: DEBUG nova.compute.manager [req-67b0cfb2-3f0c-4ddf-b2ba-214018208ba5 req-f8756472-31bb-4afb-af33-2ecc38dd532f service nova] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] No waiting events found dispatching network-vif-plugged-4e5b302f-7298-4265-84e1-3839343e7d8c {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1618.482022] env[62619]: WARNING nova.compute.manager [req-67b0cfb2-3f0c-4ddf-b2ba-214018208ba5 req-f8756472-31bb-4afb-af33-2ecc38dd532f service nova] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Received unexpected event network-vif-plugged-4e5b302f-7298-4265-84e1-3839343e7d8c for instance with vm_state building and task_state spawning. [ 1618.492647] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1777879, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475913} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.492895] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 5cf7ca57-351f-48ab-8758-b30f50cd607f/5cf7ca57-351f-48ab-8758-b30f50cd607f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1618.493113] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1618.493329] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a0743900-da83-48f6-b8bc-c6f7f7578739 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.499880] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1618.499880] env[62619]: value = "task-1777880" [ 1618.499880] env[62619]: _type = "Task" [ 1618.499880] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.507980] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1777880, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.770155] env[62619]: DEBUG nova.network.neutron [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Successfully updated port: 4e5b302f-7298-4265-84e1-3839343e7d8c {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1618.878563] env[62619]: INFO nova.compute.manager [-] [instance: 0f925028-c376-438f-8a56-deaa23047199] Took 1.42 seconds to deallocate network for instance. [ 1618.900019] env[62619]: DEBUG nova.compute.utils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1618.904108] env[62619]: DEBUG nova.compute.manager [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1618.904108] env[62619]: DEBUG nova.network.neutron [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1619.000699] env[62619]: DEBUG nova.policy [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d9bd742e619436388884f1fcd3dcbc6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c0a55c7699c4147a75d6f5f4dfe6e12', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1619.011070] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1777880, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.186821} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.012028] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1619.013248] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003cc633-d201-43ad-bf8e-51ceb03e3bda {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.038619] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 5cf7ca57-351f-48ab-8758-b30f50cd607f/5cf7ca57-351f-48ab-8758-b30f50cd607f.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1619.038941] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-127d3dc6-cb49-489c-98cf-a820e8600362 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.061443] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1619.061443] env[62619]: value = "task-1777881" [ 1619.061443] env[62619]: _type = "Task" [ 1619.061443] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.069521] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1777881, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.120175] env[62619]: DEBUG nova.network.neutron [-] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.273335] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "refresh_cache-769905db-d19a-411f-bb5d-8196056b82aa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1619.273484] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquired lock "refresh_cache-769905db-d19a-411f-bb5d-8196056b82aa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.273645] env[62619]: DEBUG nova.network.neutron [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1619.384219] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.404874] env[62619]: DEBUG nova.compute.manager [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1619.409177] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d51b7de-d4d6-4ff1-8853-d293caa2d367 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.410182] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 38.007s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.410349] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.410510] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1619.410762] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.997s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.410961] env[62619]: DEBUG nova.objects.instance [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lazy-loading 'resources' on Instance uuid ae37cae9-c82e-4775-8a8f-6bbf9108b0bd {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1619.412959] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1291bed-6453-4c07-aed3-f4372c593d1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.425092] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8792f95-5817-437a-851f-e78fa7fc7f71 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.447022] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b083757a-e198-4b07-baac-55d26ae30fd1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.454386] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a0af79-9d22-4c43-8076-26ca4a7f78b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.491462] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179076MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1619.491635] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.572156] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1777881, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.622596] env[62619]: INFO nova.compute.manager [-] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Took 1.32 seconds to deallocate network for instance. [ 1619.819698] env[62619]: DEBUG nova.network.neutron [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1619.872991] env[62619]: DEBUG nova.network.neutron [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Successfully created port: 4d9eadaf-f867-4642-bf56-7866858ac8b0 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1619.996953] env[62619]: DEBUG nova.network.neutron [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Updating instance_info_cache with network_info: [{"id": "4e5b302f-7298-4265-84e1-3839343e7d8c", "address": "fa:16:3e:09:f8:b4", "network": {"id": "7e652693-afc0-4c29-9a04-cad63acf109b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-107922130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf557954d79d4bb1939f6e65d4ed00b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e5b302f-72", "ovs_interfaceid": "4e5b302f-7298-4265-84e1-3839343e7d8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1620.072969] env[62619]: DEBUG oslo_vmware.api [None 
req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1777881, 'name': ReconfigVM_Task, 'duration_secs': 0.669009} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.072969] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 5cf7ca57-351f-48ab-8758-b30f50cd607f/5cf7ca57-351f-48ab-8758-b30f50cd607f.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1620.073398] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cce74231-a787-4e22-978e-7e10c5d5c5f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.081827] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1620.081827] env[62619]: value = "task-1777882" [ 1620.081827] env[62619]: _type = "Task" [ 1620.081827] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.090427] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1777882, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.133024] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.252696] env[62619]: DEBUG nova.compute.manager [req-30a2367b-2429-4639-baf2-7ba755c7dc4c req-50a4289d-5440-4717-9818-3f256e5cfaf0 service nova] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Received event network-vif-deleted-ac8f6def-d1ab-4e64-a359-5a90c2d13c1c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1620.368185] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f19d7d-85d9-4b53-a139-5c70048eb2c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.375484] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc39106b-8ce4-4e7a-8721-b143c2759929 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.412621] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f503d7b-7672-4994-b267-7f97ccf45667 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.417709] env[62619]: DEBUG nova.compute.manager [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1620.424023] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89747303-e0fd-4183-bd3d-d90532b1ad98 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.439842] env[62619]: DEBUG nova.compute.provider_tree [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1620.450358] env[62619]: DEBUG nova.virt.hardware [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1620.450613] env[62619]: DEBUG nova.virt.hardware [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1620.450769] env[62619]: DEBUG nova.virt.hardware [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1620.451021] env[62619]: DEBUG nova.virt.hardware [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1620.451210] env[62619]: DEBUG nova.virt.hardware [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1620.451362] env[62619]: DEBUG nova.virt.hardware [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1620.451565] env[62619]: DEBUG nova.virt.hardware 
[None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1620.451723] env[62619]: DEBUG nova.virt.hardware [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1620.451886] env[62619]: DEBUG nova.virt.hardware [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1620.452056] env[62619]: DEBUG nova.virt.hardware [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1620.452235] env[62619]: DEBUG nova.virt.hardware [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1620.453084] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ea1740-a398-4eb8-9803-19421aec0741 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.462196] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7468e590-d933-4010-a295-9e1756b6d147 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.500859] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Releasing lock "refresh_cache-769905db-d19a-411f-bb5d-8196056b82aa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.501209] env[62619]: DEBUG nova.compute.manager [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Instance network_info: |[{"id": "4e5b302f-7298-4265-84e1-3839343e7d8c", "address": "fa:16:3e:09:f8:b4", "network": {"id": "7e652693-afc0-4c29-9a04-cad63acf109b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-107922130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "bf557954d79d4bb1939f6e65d4ed00b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e5b302f-72", "ovs_interfaceid": "4e5b302f-7298-4265-84e1-3839343e7d8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1620.501631] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:f8:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a10c88d7-d13f-44fd-acee-7a734eb5f56a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4e5b302f-7298-4265-84e1-3839343e7d8c', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1620.509481] env[62619]: DEBUG oslo.service.loopingcall [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1620.509772] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1620.509934] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82667297-aeef-4866-a7dc-06b22f7d2e3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.530906] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1620.530906] env[62619]: value = "task-1777883" [ 1620.530906] env[62619]: _type = "Task" [ 1620.530906] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.537824] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777883, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.592289] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1777882, 'name': Rename_Task, 'duration_secs': 0.260946} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.592663] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1620.592934] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ee03fcc-807b-4c10-937b-317a02547673 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.599381] env[62619]: DEBUG nova.compute.manager [req-d0f69a37-72fa-4fec-a097-6df0f65ade03 req-c220a32a-60e8-4421-8351-275bb1f365c0 service nova] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Received event network-changed-4e5b302f-7298-4265-84e1-3839343e7d8c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1620.599381] env[62619]: DEBUG nova.compute.manager [req-d0f69a37-72fa-4fec-a097-6df0f65ade03 req-c220a32a-60e8-4421-8351-275bb1f365c0 service nova] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Refreshing instance network info cache due to event network-changed-4e5b302f-7298-4265-84e1-3839343e7d8c. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1620.599596] env[62619]: DEBUG oslo_concurrency.lockutils [req-d0f69a37-72fa-4fec-a097-6df0f65ade03 req-c220a32a-60e8-4421-8351-275bb1f365c0 service nova] Acquiring lock "refresh_cache-769905db-d19a-411f-bb5d-8196056b82aa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.599596] env[62619]: DEBUG oslo_concurrency.lockutils [req-d0f69a37-72fa-4fec-a097-6df0f65ade03 req-c220a32a-60e8-4421-8351-275bb1f365c0 service nova] Acquired lock "refresh_cache-769905db-d19a-411f-bb5d-8196056b82aa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.599755] env[62619]: DEBUG nova.network.neutron [req-d0f69a37-72fa-4fec-a097-6df0f65ade03 req-c220a32a-60e8-4421-8351-275bb1f365c0 service nova] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Refreshing network info cache for port 4e5b302f-7298-4265-84e1-3839343e7d8c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1620.602734] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1620.602734] env[62619]: value = "task-1777884" [ 1620.602734] env[62619]: _type = "Task" [ 1620.602734] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.617626] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1777884, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.945017] env[62619]: DEBUG nova.scheduler.client.report [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1621.039637] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777883, 'name': CreateVM_Task, 'duration_secs': 0.480582} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.039994] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1621.040618] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.040827] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.041192] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1621.041467] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b86369b4-b4fd-4f42-a04b-8ab63f8a31a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.046217] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1621.046217] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5233c52f-13f7-f4dc-8272-fbc5439f935b" [ 1621.046217] env[62619]: _type = "Task" [ 1621.046217] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.054276] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5233c52f-13f7-f4dc-8272-fbc5439f935b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.116559] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1777884, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.328310] env[62619]: DEBUG nova.network.neutron [req-d0f69a37-72fa-4fec-a097-6df0f65ade03 req-c220a32a-60e8-4421-8351-275bb1f365c0 service nova] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Updated VIF entry in instance network info cache for port 4e5b302f-7298-4265-84e1-3839343e7d8c. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1621.328687] env[62619]: DEBUG nova.network.neutron [req-d0f69a37-72fa-4fec-a097-6df0f65ade03 req-c220a32a-60e8-4421-8351-275bb1f365c0 service nova] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Updating instance_info_cache with network_info: [{"id": "4e5b302f-7298-4265-84e1-3839343e7d8c", "address": "fa:16:3e:09:f8:b4", "network": {"id": "7e652693-afc0-4c29-9a04-cad63acf109b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-107922130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf557954d79d4bb1939f6e65d4ed00b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e5b302f-72", "ovs_interfaceid": "4e5b302f-7298-4265-84e1-3839343e7d8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1621.451615] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.039s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.452717] env[62619]: DEBUG oslo_concurrency.lockutils [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 36.574s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.479410] env[62619]: INFO nova.scheduler.client.report [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleted allocations for instance ae37cae9-c82e-4775-8a8f-6bbf9108b0bd [ 1621.557766] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5233c52f-13f7-f4dc-8272-fbc5439f935b, 'name': SearchDatastore_Task, 'duration_secs': 0.018912} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.558083] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.558320] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1621.558546] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.558686] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.558859] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1621.559123] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c612daf1-0966-4412-b16d-7d4738f92c63 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.568932] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Created directory with path [datastore1] devstack-image-cache_base 
{{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1621.569197] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1621.569955] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3546b849-690e-48da-a1da-56321978b9ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.576328] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1621.576328] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ef951e-6edc-cc2c-4aec-f003b9a68b16" [ 1621.576328] env[62619]: _type = "Task" [ 1621.576328] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.584802] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ef951e-6edc-cc2c-4aec-f003b9a68b16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.615542] env[62619]: DEBUG oslo_vmware.api [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1777884, 'name': PowerOnVM_Task, 'duration_secs': 0.8843} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.615944] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1621.616340] env[62619]: INFO nova.compute.manager [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Took 8.47 seconds to spawn the instance on the hypervisor. 
[ 1621.616640] env[62619]: DEBUG nova.compute.manager [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1621.617498] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92a0c1c-658c-4ffc-81f0-c33a70c1c0aa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.833019] env[62619]: DEBUG oslo_concurrency.lockutils [req-d0f69a37-72fa-4fec-a097-6df0f65ade03 req-c220a32a-60e8-4421-8351-275bb1f365c0 service nova] Releasing lock "refresh_cache-769905db-d19a-411f-bb5d-8196056b82aa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.873089] env[62619]: DEBUG nova.network.neutron [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Successfully updated port: 4d9eadaf-f867-4642-bf56-7866858ac8b0 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1621.957442] env[62619]: DEBUG nova.objects.instance [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lazy-loading 'migration_context' on Instance uuid dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1621.988297] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f275b5a7-9226-4d9d-94c4-fcce7aa59913 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "ae37cae9-c82e-4775-8a8f-6bbf9108b0bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.175s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.086953] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ef951e-6edc-cc2c-4aec-f003b9a68b16, 'name': SearchDatastore_Task, 'duration_secs': 0.013109} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.087786] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8e42cb1-488c-4d43-b205-97b3864ec9fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.093119] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1622.093119] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ed709f-454a-1b54-c640-f8e144e2b370" [ 1622.093119] env[62619]: _type = "Task" [ 1622.093119] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.100891] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ed709f-454a-1b54-c640-f8e144e2b370, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.135894] env[62619]: INFO nova.compute.manager [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Took 52.44 seconds to build instance. [ 1622.379805] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Acquiring lock "refresh_cache-7ee5f09f-e27b-4373-88ce-8cff2f55a2b9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.379805] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Acquired lock "refresh_cache-7ee5f09f-e27b-4373-88ce-8cff2f55a2b9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.379805] env[62619]: DEBUG nova.network.neutron [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1622.605851] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ed709f-454a-1b54-c640-f8e144e2b370, 'name': SearchDatastore_Task, 'duration_secs': 0.0105} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.606197] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1622.606411] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 769905db-d19a-411f-bb5d-8196056b82aa/769905db-d19a-411f-bb5d-8196056b82aa.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1622.606916] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a2252f5-f08a-4a4f-bdba-5c7c456c2fc9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.620835] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1622.620835] env[62619]: value = "task-1777885" [ 1622.620835] env[62619]: _type = "Task" [ 1622.620835] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.628819] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777885, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.638180] env[62619]: DEBUG nova.compute.manager [req-4271aa50-14f0-41d1-aa8f-328e7584f5a6 req-30da652c-7867-4ec5-8601-65b7bd47ee30 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Received event network-vif-plugged-4d9eadaf-f867-4642-bf56-7866858ac8b0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1622.638180] env[62619]: DEBUG oslo_concurrency.lockutils [req-4271aa50-14f0-41d1-aa8f-328e7584f5a6 req-30da652c-7867-4ec5-8601-65b7bd47ee30 service nova] Acquiring lock "7ee5f09f-e27b-4373-88ce-8cff2f55a2b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.638180] env[62619]: DEBUG oslo_concurrency.lockutils [req-4271aa50-14f0-41d1-aa8f-328e7584f5a6 req-30da652c-7867-4ec5-8601-65b7bd47ee30 service nova] Lock "7ee5f09f-e27b-4373-88ce-8cff2f55a2b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.638180] env[62619]: DEBUG oslo_concurrency.lockutils [req-4271aa50-14f0-41d1-aa8f-328e7584f5a6 req-30da652c-7867-4ec5-8601-65b7bd47ee30 service nova] Lock "7ee5f09f-e27b-4373-88ce-8cff2f55a2b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.638180] env[62619]: DEBUG nova.compute.manager [req-4271aa50-14f0-41d1-aa8f-328e7584f5a6 req-30da652c-7867-4ec5-8601-65b7bd47ee30 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] No waiting events found dispatching network-vif-plugged-4d9eadaf-f867-4642-bf56-7866858ac8b0 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1622.638180] env[62619]: WARNING nova.compute.manager [req-4271aa50-14f0-41d1-aa8f-328e7584f5a6 req-30da652c-7867-4ec5-8601-65b7bd47ee30 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Received unexpected event network-vif-plugged-4d9eadaf-f867-4642-bf56-7866858ac8b0 for instance with vm_state building and task_state spawning. [ 1622.638180] env[62619]: DEBUG nova.compute.manager [req-4271aa50-14f0-41d1-aa8f-328e7584f5a6 req-30da652c-7867-4ec5-8601-65b7bd47ee30 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Received event network-changed-4d9eadaf-f867-4642-bf56-7866858ac8b0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1622.638425] env[62619]: DEBUG nova.compute.manager [req-4271aa50-14f0-41d1-aa8f-328e7584f5a6 req-30da652c-7867-4ec5-8601-65b7bd47ee30 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Refreshing instance network info cache due to event network-changed-4d9eadaf-f867-4642-bf56-7866858ac8b0. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1622.638425] env[62619]: DEBUG oslo_concurrency.lockutils [req-4271aa50-14f0-41d1-aa8f-328e7584f5a6 req-30da652c-7867-4ec5-8601-65b7bd47ee30 service nova] Acquiring lock "refresh_cache-7ee5f09f-e27b-4373-88ce-8cff2f55a2b9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.639468] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64b638b1-091c-4e19-8d09-76d07f340a97 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.712s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.814312] env[62619]: DEBUG nova.compute.manager [req-c9e2f6a0-99d6-440e-8319-964b711c2df2 req-0076f044-8961-49ac-809f-05198f7106f9 service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Received event network-changed-89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1622.814397] env[62619]: DEBUG nova.compute.manager [req-c9e2f6a0-99d6-440e-8319-964b711c2df2 req-0076f044-8961-49ac-809f-05198f7106f9 service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Refreshing instance network info cache due to event network-changed-89e91bb9-2bd5-4385-b3dd-cee4612bb166. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1622.815516] env[62619]: DEBUG oslo_concurrency.lockutils [req-c9e2f6a0-99d6-440e-8319-964b711c2df2 req-0076f044-8961-49ac-809f-05198f7106f9 service nova] Acquiring lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.815516] env[62619]: DEBUG oslo_concurrency.lockutils [req-c9e2f6a0-99d6-440e-8319-964b711c2df2 req-0076f044-8961-49ac-809f-05198f7106f9 service nova] Acquired lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.815516] env[62619]: DEBUG nova.network.neutron [req-c9e2f6a0-99d6-440e-8319-964b711c2df2 req-0076f044-8961-49ac-809f-05198f7106f9 service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Refreshing network info cache for port 89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1622.900567] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78947f2a-9b49-44d8-ad24-e5d13617ec17 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.911110] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f142b70f-3101-4527-a50c-ed10a3e3b4e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.947080] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21aa144-a8d2-426e-b20c-cf23cccbf6a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.955363] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4363afbc-6b7d-4ccd-9959-ed6649d79ad2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.972958] env[62619]: DEBUG nova.compute.provider_tree [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1622.975160] env[62619]: DEBUG nova.network.neutron [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1623.130684] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777885, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500149} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.132113] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 769905db-d19a-411f-bb5d-8196056b82aa/769905db-d19a-411f-bb5d-8196056b82aa.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1623.132113] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1623.132113] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b92ed97b-da08-4b6c-a04c-4e6d369db1e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.139639] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1623.139639] env[62619]: value = "task-1777886" [ 1623.139639] env[62619]: _type = "Task" [ 1623.139639] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.147025] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777886, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.213010] env[62619]: DEBUG nova.network.neutron [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Updating instance_info_cache with network_info: [{"id": "4d9eadaf-f867-4642-bf56-7866858ac8b0", "address": "fa:16:3e:15:87:9e", "network": {"id": "fa982f07-dafb-40ea-9b20-3b2193167cd2", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-992408249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c0a55c7699c4147a75d6f5f4dfe6e12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d9eadaf-f8", "ovs_interfaceid": "4d9eadaf-f867-4642-bf56-7866858ac8b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.416018] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Acquiring lock "20d62152-3859-4023-a11d-b17c76e1090a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.416265] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Lock "20d62152-3859-4023-a11d-b17c76e1090a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.481016] env[62619]: DEBUG nova.scheduler.client.report [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1623.598769] env[62619]: DEBUG nova.network.neutron [req-c9e2f6a0-99d6-440e-8319-964b711c2df2 req-0076f044-8961-49ac-809f-05198f7106f9 
service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Updated VIF entry in instance network info cache for port 89e91bb9-2bd5-4385-b3dd-cee4612bb166. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1623.599186] env[62619]: DEBUG nova.network.neutron [req-c9e2f6a0-99d6-440e-8319-964b711c2df2 req-0076f044-8961-49ac-809f-05198f7106f9 service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Updating instance_info_cache with network_info: [{"id": "89e91bb9-2bd5-4385-b3dd-cee4612bb166", "address": "fa:16:3e:78:49:c7", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e91bb9-2b", "ovs_interfaceid": "89e91bb9-2bd5-4385-b3dd-cee4612bb166", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.649112] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777886, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.142401} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.649385] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1623.650137] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f429a9-e24f-4624-bddd-2e8d043379f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.672456] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 769905db-d19a-411f-bb5d-8196056b82aa/769905db-d19a-411f-bb5d-8196056b82aa.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1623.672723] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbec47a7-d29b-486e-9590-9825261bc69a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.694396] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1623.694396] env[62619]: value = "task-1777887" [ 1623.694396] env[62619]: _type = "Task" [ 1623.694396] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.701941] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777887, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.715496] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Releasing lock "refresh_cache-7ee5f09f-e27b-4373-88ce-8cff2f55a2b9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.715795] env[62619]: DEBUG nova.compute.manager [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Instance network_info: |[{"id": "4d9eadaf-f867-4642-bf56-7866858ac8b0", "address": "fa:16:3e:15:87:9e", "network": {"id": "fa982f07-dafb-40ea-9b20-3b2193167cd2", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-992408249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c0a55c7699c4147a75d6f5f4dfe6e12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d9eadaf-f8", "ovs_interfaceid": "4d9eadaf-f867-4642-bf56-7866858ac8b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1623.716081] env[62619]: DEBUG oslo_concurrency.lockutils [req-4271aa50-14f0-41d1-aa8f-328e7584f5a6 req-30da652c-7867-4ec5-8601-65b7bd47ee30 service nova] Acquired lock "refresh_cache-7ee5f09f-e27b-4373-88ce-8cff2f55a2b9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.716262] env[62619]: DEBUG nova.network.neutron [req-4271aa50-14f0-41d1-aa8f-328e7584f5a6 req-30da652c-7867-4ec5-8601-65b7bd47ee30 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Refreshing network info cache for port 4d9eadaf-f867-4642-bf56-7866858ac8b0 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1623.718018] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:87:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3739ba33-c119-432c-9aee-80a62864317d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d9eadaf-f867-4642-bf56-7866858ac8b0', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1623.725495] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d 
tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Creating folder: Project (6c0a55c7699c4147a75d6f5f4dfe6e12). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1623.726382] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d06c32a-ba02-4cd6-9041-168f4392362b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.735991] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Created folder: Project (6c0a55c7699c4147a75d6f5f4dfe6e12) in parent group-v368875. [ 1623.736094] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Creating folder: Instances. Parent ref: group-v369066. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1623.736282] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4da138f-8827-4764-88be-9356072fd683 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.744769] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Created folder: Instances in parent group-v369066. [ 1623.744982] env[62619]: DEBUG oslo.service.loopingcall [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1623.745177] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1623.745372] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d68da524-7929-4647-a4c3-1aa163829ad1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.763021] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1623.763021] env[62619]: value = "task-1777890" [ 1623.763021] env[62619]: _type = "Task" [ 1623.763021] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.770032] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777890, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.919046] env[62619]: DEBUG nova.compute.manager [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1624.101545] env[62619]: DEBUG oslo_concurrency.lockutils [req-c9e2f6a0-99d6-440e-8319-964b711c2df2 req-0076f044-8961-49ac-809f-05198f7106f9 service nova] Releasing lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.204769] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777887, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.274167] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777890, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.445472] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.492791] env[62619]: DEBUG oslo_concurrency.lockutils [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.040s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.498956] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.654s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.500794] env[62619]: INFO nova.compute.claims [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1624.564327] env[62619]: DEBUG nova.network.neutron [req-4271aa50-14f0-41d1-aa8f-328e7584f5a6 req-30da652c-7867-4ec5-8601-65b7bd47ee30 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Updated VIF entry in instance network info cache for port 4d9eadaf-f867-4642-bf56-7866858ac8b0. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1624.564733] env[62619]: DEBUG nova.network.neutron [req-4271aa50-14f0-41d1-aa8f-328e7584f5a6 req-30da652c-7867-4ec5-8601-65b7bd47ee30 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Updating instance_info_cache with network_info: [{"id": "4d9eadaf-f867-4642-bf56-7866858ac8b0", "address": "fa:16:3e:15:87:9e", "network": {"id": "fa982f07-dafb-40ea-9b20-3b2193167cd2", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-992408249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c0a55c7699c4147a75d6f5f4dfe6e12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d9eadaf-f8", "ovs_interfaceid": "4d9eadaf-f867-4642-bf56-7866858ac8b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1624.705519] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777887, 'name': ReconfigVM_Task, 'duration_secs': 0.689052} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.705967] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 769905db-d19a-411f-bb5d-8196056b82aa/769905db-d19a-411f-bb5d-8196056b82aa.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1624.706554] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79d6148d-6d0f-414d-bfa3-7b56ad132f25 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.712772] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1624.712772] env[62619]: value = "task-1777891" [ 1624.712772] env[62619]: _type = "Task" [ 1624.712772] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.721160] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777891, 'name': Rename_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.773674] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777890, 'name': CreateVM_Task, 'duration_secs': 0.846756} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.773853] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1624.774599] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.774760] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.775107] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1624.775367] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ee02e63-d46a-48bd-bd7b-41612f5d33d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.780043] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Waiting for the task: (returnval){ [ 1624.780043] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52db0908-af2d-a7fc-c965-98112391077d" [ 1624.780043] env[62619]: _type = "Task" [ 1624.780043] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.787603] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52db0908-af2d-a7fc-c965-98112391077d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.067514] env[62619]: DEBUG oslo_concurrency.lockutils [req-4271aa50-14f0-41d1-aa8f-328e7584f5a6 req-30da652c-7867-4ec5-8601-65b7bd47ee30 service nova] Releasing lock "refresh_cache-7ee5f09f-e27b-4373-88ce-8cff2f55a2b9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.223040] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777891, 'name': Rename_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.290440] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52db0908-af2d-a7fc-c965-98112391077d, 'name': SearchDatastore_Task, 'duration_secs': 0.016853} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.290745] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.291214] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1625.291214] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.291347] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.291730] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1625.291814] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5cf727a5-9249-4dee-b387-a9d13e0a20d9 {{(pid=62619) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.300628] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1625.300751] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1625.301993] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83ddc8ca-d1e2-4ea8-9b32-a01e38f2e7e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.306438] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Waiting for the task: (returnval){ [ 1625.306438] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e3f2d9-2da5-d6ed-9936-2c45bd979449" [ 1625.306438] env[62619]: _type = "Task" [ 1625.306438] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.314162] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e3f2d9-2da5-d6ed-9936-2c45bd979449, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.724073] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777891, 'name': Rename_Task, 'duration_secs': 0.513387} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.724302] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1625.726272] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-decf2f12-bdca-400e-bd1e-7645c34e1aa0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.732536] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1625.732536] env[62619]: value = "task-1777892" [ 1625.732536] env[62619]: _type = "Task" [ 1625.732536] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.741475] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777892, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.819287] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e3f2d9-2da5-d6ed-9936-2c45bd979449, 'name': SearchDatastore_Task, 'duration_secs': 0.009494} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.820073] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11ad5865-e6f1-4125-bee2-1366d94a0a90 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.827567] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Waiting for the task: (returnval){ [ 1625.827567] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5292676c-a841-e905-5d90-121268ed3a90" [ 1625.827567] env[62619]: _type = "Task" [ 1625.827567] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.835126] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5292676c-a841-e905-5d90-121268ed3a90, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.886362] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1885445-73bf-44bf-aecd-03f7a6b5b121 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.894606] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3144ffb6-9003-4199-9f8f-1fb920f3262c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.931743] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5434b143-dc38-431f-abf0-731d0dd891bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.942177] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ff9717-b87a-41ea-88d5-ad0cc28c99e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.957183] env[62619]: DEBUG nova.compute.provider_tree [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1626.039154] env[62619]: INFO nova.compute.manager [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Swapping old allocation on dict_keys(['e814b747-ed75-487b-a97d-acf66bc6db0b']) held by migration 54adc27f-a3e2-473a-ac9e-41a3f933f141 for instance [ 1626.060508] env[62619]: DEBUG nova.scheduler.client.report [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Overwriting current allocation {'allocations': {'e814b747-ed75-487b-a97d-acf66bc6db0b': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 98}}, 'project_id': 'af811eaa982a4c329e8ab9b58f4c8695', 'user_id': 'bb56261151994b459d40b190725f3867', 'consumer_generation': 1} on consumer dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5 {{(pid=62619) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1626.145130] env[62619]: DEBUG oslo_concurrency.lockutils [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.145365] env[62619]: DEBUG oslo_concurrency.lockutils [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquired lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.145548] env[62619]: DEBUG nova.network.neutron [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: 
dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1626.241523] env[62619]: DEBUG oslo_vmware.api [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777892, 'name': PowerOnVM_Task, 'duration_secs': 0.476562} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.241782] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1626.241982] env[62619]: INFO nova.compute.manager [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1626.242190] env[62619]: DEBUG nova.compute.manager [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1626.242939] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b8d2fb-76fa-4bdb-a6d1-fe0aec65fc4f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.339464] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5292676c-a841-e905-5d90-121268ed3a90, 'name': SearchDatastore_Task, 'duration_secs': 0.009151} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.339801] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.340103] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9/7ee5f09f-e27b-4373-88ce-8cff2f55a2b9.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1626.340429] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81daa0a2-49b1-48c0-93e2-c3bbe49c97c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.347093] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Waiting for the task: (returnval){ [ 1626.347093] env[62619]: value = "task-1777893" [ 1626.347093] env[62619]: _type = "Task" [ 1626.347093] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.355095] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1777893, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.460024] env[62619]: DEBUG nova.scheduler.client.report [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1626.761101] env[62619]: INFO nova.compute.manager [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Took 52.31 seconds to build instance. 
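The entries above show the VMware driver repeatedly polling vCenter tasks (CreateVM_Task, Rename_Task, CopyVirtualDisk_Task) via oslo_vmware.api until each reports completion ("progress is N%" followed by "completed successfully"). As a rough, non-authoritative sketch of that pattern only — not the actual oslo.vmware implementation, which uses VMwareAPISession.wait_for_task() — the loop below polls a hypothetical get_task_info() callable at a fixed interval; the names TaskFailed and poll_task are invented for illustration.

# Simplified illustration of a task-polling loop similar in spirit to the
# "Task: {...} progress is N%" entries above. All names here are hypothetical;
# the real driver relies on oslo.vmware's task-waiting helpers.
import time


class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""


def poll_task(get_task_info, interval=0.5, timeout=300):
    """Poll get_task_info() until the task succeeds, fails, or times out.

    get_task_info must return a dict with keys 'state' ('running',
    'success' or 'error') and optionally 'progress' / 'error_msg'.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info()
        state = info.get('state')
        if state == 'success':
            return info
        if state == 'error':
            raise TaskFailed(info.get('error_msg', 'task failed'))
        if time.monotonic() > deadline:
            raise TaskFailed('timed out waiting for task')
        # Comparable to the periodic "progress is N%" DEBUG lines in the log.
        print("progress is %s%%" % info.get('progress', 0))
        time.sleep(interval)


if __name__ == '__main__':
    # Fake task that completes after three polls, just to exercise the loop.
    calls = {'n': 0}

    def fake_task():
        calls['n'] += 1
        if calls['n'] < 3:
            return {'state': 'running', 'progress': calls['n'] * 40}
        return {'state': 'success', 'progress': 100}

    poll_task(fake_task, interval=0.01)

The real session-level helper also re-resolves the task object on each poll and translates vCenter fault information into exceptions; the sketch only captures the poll/sleep/raise shape visible in these log lines.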
[ 1626.856815] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1777893, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470446} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.857186] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9/7ee5f09f-e27b-4373-88ce-8cff2f55a2b9.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1626.857446] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1626.857735] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58104616-f121-4fa4-8f5c-627d1483a2e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.866057] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Waiting for the task: (returnval){ [ 1626.866057] env[62619]: value = "task-1777894" [ 1626.866057] env[62619]: _type = "Task" [ 1626.866057] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.875730] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1777894, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.965669] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.467s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.966474] env[62619]: DEBUG nova.compute.manager [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1626.969425] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.831s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.969609] env[62619]: DEBUG nova.objects.instance [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1627.023304] env[62619]: DEBUG nova.network.neutron [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance_info_cache with network_info: [{"id": "b1ace9af-97b6-4d21-bbe4-972a2a1c1e13", "address": "fa:16:3e:70:d7:d3", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.184", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1ace9af-97", "ovs_interfaceid": "b1ace9af-97b6-4d21-bbe4-972a2a1c1e13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.235424] env[62619]: DEBUG oslo_concurrency.lockutils [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "769905db-d19a-411f-bb5d-8196056b82aa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.263696] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d6530c-67c0-4ff5-9288-2bd626c01763 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "769905db-d19a-411f-bb5d-8196056b82aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.823s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.263983] env[62619]: DEBUG oslo_concurrency.lockutils [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 
tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "769905db-d19a-411f-bb5d-8196056b82aa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.029s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.264214] env[62619]: DEBUG oslo_concurrency.lockutils [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "769905db-d19a-411f-bb5d-8196056b82aa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.264419] env[62619]: DEBUG oslo_concurrency.lockutils [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "769905db-d19a-411f-bb5d-8196056b82aa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.264653] env[62619]: DEBUG oslo_concurrency.lockutils [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "769905db-d19a-411f-bb5d-8196056b82aa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.268626] env[62619]: INFO nova.compute.manager [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Terminating instance [ 1627.377880] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1777894, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.153764} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.378129] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1627.378863] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4b7d46-c769-4ef0-9f78-bf5229727924 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.400862] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9/7ee5f09f-e27b-4373-88ce-8cff2f55a2b9.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1627.401103] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6601e59-762b-489c-87ab-73b65ca3307a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.421286] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Waiting for the task: (returnval){ [ 1627.421286] env[62619]: value = "task-1777895" [ 1627.421286] env[62619]: _type = "Task" [ 1627.421286] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.429548] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1777895, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.471036] env[62619]: DEBUG nova.compute.utils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1627.472491] env[62619]: DEBUG nova.compute.manager [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1627.472681] env[62619]: DEBUG nova.network.neutron [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1627.512768] env[62619]: DEBUG nova.policy [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25159d73422b45dbbe4bab2b2a835055', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df50ba9d97ac4c059077c87f9cfdb719', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1627.526319] env[62619]: DEBUG oslo_concurrency.lockutils [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Releasing lock "refresh_cache-dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.526739] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1627.527010] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c302018-a04a-4c52-ad46-f12b9889589e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.534171] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1627.534171] env[62619]: value = "task-1777896" [ 1627.534171] env[62619]: _type = "Task" [ 1627.534171] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.542272] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777896, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.775659] env[62619]: DEBUG nova.compute.manager [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1627.775999] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1627.777186] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e0acf8-162d-4bbc-a664-c796a5cc255d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.786879] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1627.786879] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91e46744-9846-4f1e-bbce-73ab8490be43 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.792615] env[62619]: DEBUG oslo_vmware.api [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1627.792615] env[62619]: value = "task-1777897" [ 1627.792615] env[62619]: _type = "Task" [ 1627.792615] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.800978] env[62619]: DEBUG oslo_vmware.api [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777897, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.939350] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1777895, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.976467] env[62619]: DEBUG nova.compute.manager [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1627.980336] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ee944d75-0acb-40f4-af95-631ff613b1e8 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.981676] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.404s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.981904] env[62619]: DEBUG nova.objects.instance [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lazy-loading 'resources' on Instance uuid c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1628.049084] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777896, 'name': PowerOffVM_Task, 'duration_secs': 0.262291} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.050526] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1628.050935] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:50:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='026a8c7d-034e-431f-86ad-5b594effd325',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-602383372',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1628.051377] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1628.051690] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image limits 0:0:0 {{(pid=62619) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1628.052066] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1628.052405] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1628.052719] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1628.053169] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1628.053482] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1628.053864] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1628.054212] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1628.054578] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1628.064703] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa8c1b7b-5446-499c-a9ac-23c746122bac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.089491] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1628.089491] env[62619]: value = "task-1777898" [ 1628.089491] env[62619]: _type = "Task" [ 1628.089491] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.102501] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777898, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.229457] env[62619]: DEBUG nova.network.neutron [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Successfully created port: f289fb71-1285-4a29-9580-10815cd08cba {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1628.303417] env[62619]: DEBUG oslo_vmware.api [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777897, 'name': PowerOffVM_Task, 'duration_secs': 0.400886} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.303691] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1628.303857] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1628.304142] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e6e7fab-f14b-4103-9713-be867f16024f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.432969] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1777895, 'name': ReconfigVM_Task, 'duration_secs': 0.917117} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.433295] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9/7ee5f09f-e27b-4373-88ce-8cff2f55a2b9.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1628.433942] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84dc62be-61ee-4d4d-bf14-6dab73cabcfe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.440356] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Waiting for the task: (returnval){ [ 1628.440356] env[62619]: value = "task-1777900" [ 1628.440356] env[62619]: _type = "Task" [ 1628.440356] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.453615] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1777900, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.480966] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1628.481200] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1628.481417] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Deleting the datastore file [datastore1] 769905db-d19a-411f-bb5d-8196056b82aa {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1628.485558] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4694a6e-ce63-45d5-8b3a-68606e3c4a6e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.495382] env[62619]: DEBUG oslo_vmware.api [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1628.495382] env[62619]: value = "task-1777901" [ 1628.495382] env[62619]: _type = "Task" [ 1628.495382] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.504729] env[62619]: DEBUG oslo_vmware.api [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777901, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.602295] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777898, 'name': ReconfigVM_Task, 'duration_secs': 0.234118} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.604366] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766c6c56-2c0f-4225-9c62-2486e2065cce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.636236] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:50:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='026a8c7d-034e-431f-86ad-5b594effd325',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-602383372',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1628.636236] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1628.636363] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1628.636668] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1628.641022] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1628.641022] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1628.641022] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1628.641022] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1628.641022] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1628.641022] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1628.641022] env[62619]: DEBUG nova.virt.hardware [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1628.644803] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8556c8b-2fe3-400c-b1ef-7537b417b82a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.653097] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1628.653097] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527385ef-6f56-749d-8a29-1312427b11d4" [ 1628.653097] env[62619]: _type = "Task" [ 1628.653097] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.667280] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527385ef-6f56-749d-8a29-1312427b11d4, 'name': SearchDatastore_Task, 'duration_secs': 0.009595} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.678494] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Reconfiguring VM instance instance-00000032 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1628.679102] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edc22306-0705-45df-abc7-5ab969f02774 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.698334] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1628.698334] env[62619]: value = "task-1777902" [ 1628.698334] env[62619]: _type = "Task" [ 1628.698334] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.710366] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777902, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.957030] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1777900, 'name': Rename_Task, 'duration_secs': 0.313139} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.957419] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1628.957596] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ddc0a2cd-e2ab-4f70-b783-ab584ebeebfa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.966076] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Waiting for the task: (returnval){ [ 1628.966076] env[62619]: value = "task-1777903" [ 1628.966076] env[62619]: _type = "Task" [ 1628.966076] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.974192] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1777903, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.991256] env[62619]: DEBUG nova.compute.manager [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1629.008860] env[62619]: DEBUG oslo_vmware.api [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1777901, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.345704} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.012135] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1629.012388] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1629.012598] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1629.012807] env[62619]: INFO nova.compute.manager [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1629.013316] env[62619]: DEBUG oslo.service.loopingcall [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1629.015558] env[62619]: DEBUG nova.compute.manager [-] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1629.015643] env[62619]: DEBUG nova.network.neutron [-] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1629.029444] env[62619]: DEBUG nova.virt.hardware [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1629.029636] env[62619]: DEBUG nova.virt.hardware [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1629.032018] env[62619]: DEBUG nova.virt.hardware [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1629.032018] env[62619]: DEBUG nova.virt.hardware [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1629.032018] env[62619]: DEBUG nova.virt.hardware [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1629.032018] env[62619]: DEBUG nova.virt.hardware [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1629.032018] env[62619]: DEBUG nova.virt.hardware [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1629.032018] env[62619]: DEBUG nova.virt.hardware [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1629.032018] env[62619]: DEBUG nova.virt.hardware [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1629.032018] env[62619]: DEBUG nova.virt.hardware [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1629.032018] env[62619]: DEBUG nova.virt.hardware [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1629.032402] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3b5e76-d62d-4480-a712-dfdc65b60347 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.036640] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7446d8e2-7136-4766-ac8b-102917fab892 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.046673] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f199b72-30b6-4338-8009-bb70aa6deebc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.051416] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e200d180-4047-4bb2-bbc8-219409decaa2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.097709] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fba4d80-4c21-47bd-aca4-eb89df7d9cf3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.106075] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a62433-4526-48ca-9b7e-e74d2f65a5bb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.119917] env[62619]: DEBUG nova.compute.provider_tree [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1629.211700] 
env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777902, 'name': ReconfigVM_Task, 'duration_secs': 0.280469} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.212044] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Reconfigured VM instance instance-00000032 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1629.213041] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74084414-c667-4176-ad66-405d63f4fec0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.238823] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5/dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1629.239225] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87edb9e2-25f8-407f-ba42-3d340dd3250d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.260317] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1629.260317] env[62619]: value = "task-1777904" [ 1629.260317] env[62619]: _type = "Task" [ 1629.260317] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.269081] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777904, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.310877] env[62619]: DEBUG nova.compute.manager [req-d6b85f19-0db1-477f-b3bf-9b4cdb7f9cd1 req-49935ad1-b532-493f-a40c-1cfc25e5ccbf service nova] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Received event network-vif-deleted-4e5b302f-7298-4265-84e1-3839343e7d8c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1629.311051] env[62619]: INFO nova.compute.manager [req-d6b85f19-0db1-477f-b3bf-9b4cdb7f9cd1 req-49935ad1-b532-493f-a40c-1cfc25e5ccbf service nova] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Neutron deleted interface 4e5b302f-7298-4265-84e1-3839343e7d8c; detaching it from the instance and deleting it from the info cache [ 1629.311282] env[62619]: DEBUG nova.network.neutron [req-d6b85f19-0db1-477f-b3bf-9b4cdb7f9cd1 req-49935ad1-b532-493f-a40c-1cfc25e5ccbf service nova] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.476365] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1777903, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.623385] env[62619]: DEBUG nova.scheduler.client.report [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1629.771680] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777904, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.788202] env[62619]: DEBUG nova.network.neutron [-] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.814605] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3df1f7fd-d52b-4ddc-a337-27ab09855a17 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.824481] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52248667-1c13-407b-8f81-4e47d72589d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.857675] env[62619]: DEBUG nova.compute.manager [req-d6b85f19-0db1-477f-b3bf-9b4cdb7f9cd1 req-49935ad1-b532-493f-a40c-1cfc25e5ccbf service nova] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Detach interface failed, port_id=4e5b302f-7298-4265-84e1-3839343e7d8c, reason: Instance 769905db-d19a-411f-bb5d-8196056b82aa could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1629.977265] env[62619]: DEBUG oslo_vmware.api [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1777903, 'name': PowerOnVM_Task, 'duration_secs': 0.82584} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.978918] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1629.980018] env[62619]: INFO nova.compute.manager [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Took 9.56 seconds to spawn the instance on the hypervisor. 
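The nova.virt.hardware entries above ("Flavor limits 0:0:0", "limits were sockets=65536, cores=65536, threads=65536", "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies ... (cores=1,sockets=1,threads=1)") record Nova enumerating every sockets/cores/threads split of the flavor's vCPU count and keeping the ones within the limits. Below is a minimal standalone sketch of that enumeration, assuming a plain namedtuple in place of Nova's VirtCPUTopology object; it illustrates the selection logic only and is not Nova's actual _get_possible_cpu_topologies code.

# Illustrative sketch only: mirrors the topology enumeration recorded in the
# nova.virt.hardware debug entries above, not Nova's actual implementation.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, maximum):
    """Yield every sockets/cores/threads combination whose product equals
    `vcpus` while no dimension exceeds its maximum (65536 each in the log)."""
    for sockets in range(1, min(maximum.sockets, vcpus) + 1):
        for cores in range(1, min(maximum.cores, vcpus) + 1):
            for threads in range(1, min(maximum.threads, vcpus) + 1):
                if sockets * cores * threads == vcpus:
                    yield VirtCPUTopology(sockets, cores, threads)

maximum = VirtCPUTopology(65536, 65536, 65536)
print(list(possible_topologies(1, maximum)))
# -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], matching the
#    "Got 1 possible topologies" / "Sorted desired topologies" entries above.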
[ 1629.980018] env[62619]: DEBUG nova.compute.manager [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1629.984016] env[62619]: DEBUG nova.compute.manager [req-4c913913-1d22-45d9-9d70-b172f166fb00 req-7157a439-aefb-46e3-829a-d052ffdc19d4 service nova] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Received event network-vif-plugged-f289fb71-1285-4a29-9580-10815cd08cba {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1629.984016] env[62619]: DEBUG oslo_concurrency.lockutils [req-4c913913-1d22-45d9-9d70-b172f166fb00 req-7157a439-aefb-46e3-829a-d052ffdc19d4 service nova] Acquiring lock "cbff225f-2d11-4a43-a320-95dd3afb8e48-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.984016] env[62619]: DEBUG oslo_concurrency.lockutils [req-4c913913-1d22-45d9-9d70-b172f166fb00 req-7157a439-aefb-46e3-829a-d052ffdc19d4 service nova] Lock "cbff225f-2d11-4a43-a320-95dd3afb8e48-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.984016] env[62619]: DEBUG oslo_concurrency.lockutils [req-4c913913-1d22-45d9-9d70-b172f166fb00 req-7157a439-aefb-46e3-829a-d052ffdc19d4 service nova] Lock "cbff225f-2d11-4a43-a320-95dd3afb8e48-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.984016] env[62619]: DEBUG nova.compute.manager [req-4c913913-1d22-45d9-9d70-b172f166fb00 req-7157a439-aefb-46e3-829a-d052ffdc19d4 service nova] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] No waiting events found dispatching network-vif-plugged-f289fb71-1285-4a29-9580-10815cd08cba {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1629.984016] env[62619]: WARNING nova.compute.manager [req-4c913913-1d22-45d9-9d70-b172f166fb00 req-7157a439-aefb-46e3-829a-d052ffdc19d4 service nova] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Received unexpected event network-vif-plugged-f289fb71-1285-4a29-9580-10815cd08cba for instance with vm_state building and task_state spawning. 
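The "cbff225f-...-events" lock lines above (acquired by nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event and released after 0.000s, followed by "No waiting events found" and the "Received unexpected event" warning) show oslo.concurrency serializing per-instance external-event dispatch. A minimal sketch of that pattern follows, assuming a hypothetical in-memory event store and handler name; lockutils.synchronized is the real oslo.concurrency decorator, everything else here is illustrative rather than Nova's actual code.

# Hypothetical sketch of the locking pattern behind the
# "<instance-uuid>-events" acquire/release lines above; the event store and
# function names are illustrative, not Nova's implementation.
from oslo_concurrency import lockutils

_pending_events = {}  # instance_uuid -> {event_name: payload}

def pop_instance_event(instance_uuid, event_name):
    # lockutils.synchronized serializes callers per lock name and, with debug
    # logging enabled, emits the 'acquired ... waited' / '"released" ... held'
    # DEBUG lines seen above.
    @lockutils.synchronized(f"{instance_uuid}-events")
    def _pop_event():
        return _pending_events.get(instance_uuid, {}).pop(event_name, None)

    return _pop_event()

# When no waiter registered the event beforehand, the caller falls back to the
# "Received unexpected event ..." WARNING path instead of dispatching it.
print(pop_instance_event("cbff225f-2d11-4a43-a320-95dd3afb8e48",
                         "network-vif-plugged-f289fb71-1285-4a29-9580-10815cd08cba"))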
[ 1629.984016] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7602bd51-9636-4334-99a3-b8387289c9f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.132049] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.148s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.132049] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.815s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.132826] env[62619]: DEBUG nova.objects.instance [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lazy-loading 'resources' on Instance uuid 5b1008fb-7c0a-4e12-90f8-119a82ea62f1 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1630.162770] env[62619]: INFO nova.scheduler.client.report [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Deleted allocations for instance c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3 [ 1630.193091] env[62619]: DEBUG nova.network.neutron [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Successfully updated port: f289fb71-1285-4a29-9580-10815cd08cba {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1630.271987] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777904, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.290887] env[62619]: INFO nova.compute.manager [-] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Took 1.28 seconds to deallocate network for instance. [ 1630.501067] env[62619]: INFO nova.compute.manager [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Took 53.18 seconds to build instance. 
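The recurring "Waiting for the task", "Task: {...} progress is N%." and "completed successfully" entries around here (ReconfigVM_Task, PowerOnVM_Task, DeleteDatastoreFile_Task) come from oslo.vmware's wait_for_task/_poll_task loop referenced at api.py:397/434. The sketch below imitates that polling pattern in isolation; get_task_info and the state dictionaries are hypothetical stand-ins for a vCenter TaskInfo lookup, and this is not oslo.vmware's implementation.

# Simplified illustration of the wait_for_task/_poll_task pattern that
# produces the "progress is N%" and "completed successfully" entries above.
import time

def wait_for_task(task_id, get_task_info, poll_interval=0.5):
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)  # e.g. {'state': 'running', 'progress': 66}
        if info["state"] == "running":
            print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
            time.sleep(poll_interval)
        elif info["state"] == "success":
            duration = time.monotonic() - start
            print(f"Task {task_id} completed successfully in {duration:.3f}s.")
            return info.get("result")
        else:  # 'error'
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")

# Example run with a canned sequence of task states:
_states = iter([{"state": "running", "progress": 0},
                {"state": "running", "progress": 66},
                {"state": "success", "result": "ok"}])
print(wait_for_task("task-1777903", lambda _tid: next(_states), poll_interval=0))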
[ 1630.678044] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86927623-877c-4808-99f5-c302c17d3d77 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.479s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.694789] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "refresh_cache-cbff225f-2d11-4a43-a320-95dd3afb8e48" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.695547] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "refresh_cache-cbff225f-2d11-4a43-a320-95dd3afb8e48" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.695547] env[62619]: DEBUG nova.network.neutron [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1630.775132] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777904, 'name': ReconfigVM_Task, 'duration_secs': 1.014432} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.775544] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Reconfigured VM instance instance-00000032 to attach disk [datastore1] dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5/dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1630.778824] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d019978e-fcc1-43b4-b798-896b247dbe37 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.805982] env[62619]: DEBUG oslo_concurrency.lockutils [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.810702] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d620a14-4ec5-4468-a9d8-f2bc773ffa9a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.844115] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67313246-0438-48cb-9c9c-628914d9132e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.876458] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3db6fe7-b73f-406e-b920-93c95f3772d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.886291] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1630.886617] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2082b293-ac73-4847-b9b8-b3d0f888ac3a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.893448] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1630.893448] env[62619]: value = "task-1777906" [ 1630.893448] env[62619]: _type = "Task" [ 1630.893448] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.901124] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777906, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.002736] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a84f6206-4fcb-452c-a832-0ad8bdb7e64d tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Lock "7ee5f09f-e27b-4373-88ce-8cff2f55a2b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.724s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.093327] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698551eb-f6b7-4214-ac87-ff9417c68333 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.100811] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b6d2e0-44fa-445a-8eaa-199e961509f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.132284] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7c4796-7f86-4bc9-b20f-a52b4f785641 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.140430] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b93023-9409-4a47-9bc1-575e61b5f7f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.153819] env[62619]: DEBUG nova.compute.provider_tree [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1631.289413] env[62619]: DEBUG nova.network.neutron [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1631.404303] env[62619]: DEBUG oslo_vmware.api [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777906, 'name': PowerOnVM_Task, 'duration_secs': 0.40054} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.404303] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1631.601169] env[62619]: DEBUG nova.network.neutron [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating instance_info_cache with network_info: [{"id": "f289fb71-1285-4a29-9580-10815cd08cba", "address": "fa:16:3e:bf:f4:72", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf289fb71-12", "ovs_interfaceid": "f289fb71-1285-4a29-9580-10815cd08cba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.657154] env[62619]: DEBUG nova.scheduler.client.report [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1632.016875] env[62619]: DEBUG nova.compute.manager [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Received event network-changed-f289fb71-1285-4a29-9580-10815cd08cba {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1632.017152] env[62619]: DEBUG nova.compute.manager [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Refreshing instance network info cache due to event network-changed-f289fb71-1285-4a29-9580-10815cd08cba. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1632.017276] env[62619]: DEBUG oslo_concurrency.lockutils [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] Acquiring lock "refresh_cache-cbff225f-2d11-4a43-a320-95dd3afb8e48" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1632.103901] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "refresh_cache-cbff225f-2d11-4a43-a320-95dd3afb8e48" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.104253] env[62619]: DEBUG nova.compute.manager [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Instance network_info: |[{"id": "f289fb71-1285-4a29-9580-10815cd08cba", "address": "fa:16:3e:bf:f4:72", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf289fb71-12", "ovs_interfaceid": "f289fb71-1285-4a29-9580-10815cd08cba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1632.104842] env[62619]: DEBUG oslo_concurrency.lockutils [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] Acquired lock "refresh_cache-cbff225f-2d11-4a43-a320-95dd3afb8e48" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.104842] env[62619]: DEBUG nova.network.neutron [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Refreshing network info cache for port f289fb71-1285-4a29-9580-10815cd08cba {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1632.106358] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:f4:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'f289fb71-1285-4a29-9580-10815cd08cba', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1632.114016] env[62619]: DEBUG oslo.service.loopingcall [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1632.114972] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1632.115191] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15bbcacb-1c05-45db-b04d-5719f47e9e87 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.137223] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1632.137223] env[62619]: value = "task-1777907" [ 1632.137223] env[62619]: _type = "Task" [ 1632.137223] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.144754] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777907, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.162175] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.030s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1632.165306] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.221s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1632.165306] env[62619]: DEBUG nova.objects.instance [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lazy-loading 'resources' on Instance uuid 6cd2f6e6-79a4-41be-a349-b504028ecab4 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1632.186777] env[62619]: INFO nova.scheduler.client.report [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleted allocations for instance 5b1008fb-7c0a-4e12-90f8-119a82ea62f1 [ 1632.416744] env[62619]: INFO nova.compute.manager [None req-32290a58-c669-4b25-8c35-1165278d6cbb tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance to original state: 'active' [ 1632.647193] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777907, 'name': CreateVM_Task, 'duration_secs': 0.454025} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.647372] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1632.648037] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1632.648208] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.648515] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1632.648763] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc9041d6-8976-467a-91ca-9dc4b1d29fb8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.653365] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1632.653365] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a83310-6d06-1810-eb36-47d6c83d1a7d" [ 1632.653365] env[62619]: _type = "Task" [ 1632.653365] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.661060] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a83310-6d06-1810-eb36-47d6c83d1a7d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.695083] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f9f7cab-46bc-48cd-96b3-5f4de270811e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "5b1008fb-7c0a-4e12-90f8-119a82ea62f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.382s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.041889] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ff154e-7a2f-411e-8f6b-15866521b004 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.049521] env[62619]: DEBUG nova.network.neutron [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updated VIF entry in instance network info cache for port f289fb71-1285-4a29-9580-10815cd08cba. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1633.049865] env[62619]: DEBUG nova.network.neutron [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating instance_info_cache with network_info: [{"id": "f289fb71-1285-4a29-9580-10815cd08cba", "address": "fa:16:3e:bf:f4:72", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf289fb71-12", "ovs_interfaceid": "f289fb71-1285-4a29-9580-10815cd08cba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1633.051667] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f4c42d-c37b-4984-ac8e-2bfa0f61a58b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.084800] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e644c495-64aa-4792-9603-6d8dd06dc5ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.092825] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2eb74b4-a6bb-4f67-bc29-247eb78f7287 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.106903] env[62619]: DEBUG nova.compute.provider_tree [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1633.163740] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a83310-6d06-1810-eb36-47d6c83d1a7d, 'name': SearchDatastore_Task, 'duration_secs': 0.010605} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.164172] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.164278] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1633.164503] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.164644] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.164885] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1633.165194] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47bdc27f-9171-4aa0-8d73-850e9bbb3297 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.175014] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1633.175196] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1633.175892] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b237b39-e976-4a2c-8445-6310279a2a46 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.181028] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1633.181028] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528e23e1-c4fe-de02-c486-8d243302ff06" [ 1633.181028] env[62619]: _type = "Task" [ 1633.181028] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.188375] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528e23e1-c4fe-de02-c486-8d243302ff06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.555803] env[62619]: DEBUG oslo_concurrency.lockutils [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] Releasing lock "refresh_cache-cbff225f-2d11-4a43-a320-95dd3afb8e48" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.556103] env[62619]: DEBUG nova.compute.manager [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Received event network-changed-4d9eadaf-f867-4642-bf56-7866858ac8b0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1633.556287] env[62619]: DEBUG nova.compute.manager [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Refreshing instance network info cache due to event network-changed-4d9eadaf-f867-4642-bf56-7866858ac8b0. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1633.556495] env[62619]: DEBUG oslo_concurrency.lockutils [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] Acquiring lock "refresh_cache-7ee5f09f-e27b-4373-88ce-8cff2f55a2b9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.556685] env[62619]: DEBUG oslo_concurrency.lockutils [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] Acquired lock "refresh_cache-7ee5f09f-e27b-4373-88ce-8cff2f55a2b9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.556859] env[62619]: DEBUG nova.network.neutron [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Refreshing network info cache for port 4d9eadaf-f867-4642-bf56-7866858ac8b0 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1633.609772] env[62619]: DEBUG nova.scheduler.client.report [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1633.691925] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528e23e1-c4fe-de02-c486-8d243302ff06, 'name': SearchDatastore_Task, 'duration_secs': 0.025362} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.694128] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-244d9773-b44c-4378-b061-9c63bb8fdf72 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.701954] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1633.701954] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a7a0a9-00a9-5cf1-1fda-fc7e03ab4854" [ 1633.701954] env[62619]: _type = "Task" [ 1633.701954] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.711960] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a7a0a9-00a9-5cf1-1fda-fc7e03ab4854, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.798261] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1633.798538] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.798746] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1633.798930] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.799127] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.801266] env[62619]: INFO nova.compute.manager [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Terminating instance [ 1634.039824] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "cef20063-96f0-46cc-9f7d-4436b60216c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.040124] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "cef20063-96f0-46cc-9f7d-4436b60216c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1634.042329] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "cef20063-96f0-46cc-9f7d-4436b60216c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.042562] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "cef20063-96f0-46cc-9f7d-4436b60216c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1634.042753] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "cef20063-96f0-46cc-9f7d-4436b60216c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.044848] env[62619]: INFO nova.compute.manager [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Terminating instance [ 1634.115140] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.951s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.117879] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.016s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1634.119489] env[62619]: INFO nova.compute.claims [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1634.137655] env[62619]: INFO nova.scheduler.client.report [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Deleted allocations for instance 6cd2f6e6-79a4-41be-a349-b504028ecab4 [ 1634.210727] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a7a0a9-00a9-5cf1-1fda-fc7e03ab4854, 'name': SearchDatastore_Task, 
'duration_secs': 0.048271} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.211035] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1634.213821] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] cbff225f-2d11-4a43-a320-95dd3afb8e48/cbff225f-2d11-4a43-a320-95dd3afb8e48.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1634.213821] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7482a770-053e-464c-9e6e-cf9718c42191 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.219766] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1634.219766] env[62619]: value = "task-1777908" [ 1634.219766] env[62619]: _type = "Task" [ 1634.219766] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.228627] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777908, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.305411] env[62619]: DEBUG nova.compute.manager [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1634.305617] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1634.306568] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dca45a7-bc91-4538-aba7-4dbd37e482c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.313895] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1634.314146] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b870f0c-8c9f-4dcb-9be0-2c072ab0ca92 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.319984] env[62619]: DEBUG oslo_vmware.api [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1634.319984] env[62619]: value = "task-1777909" [ 1634.319984] env[62619]: _type = "Task" [ 1634.319984] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.329026] env[62619]: DEBUG oslo_vmware.api [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777909, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.548736] env[62619]: DEBUG nova.compute.manager [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1634.549041] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1634.550013] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02cdcc24-560c-46e0-a1e1-e5f3faca1e27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.558940] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1634.559189] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1baea0d-e158-4109-acf8-87e2fbe7f84a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.565122] env[62619]: DEBUG oslo_vmware.api [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1634.565122] env[62619]: value = "task-1777910" [ 1634.565122] env[62619]: _type = "Task" [ 1634.565122] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.573962] env[62619]: DEBUG oslo_vmware.api [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777910, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.646123] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3a7e421b-c03a-4c13-94e6-9e648aef2574 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "6cd2f6e6-79a4-41be-a349-b504028ecab4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.679s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.699775] env[62619]: DEBUG nova.network.neutron [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Updated VIF entry in instance network info cache for port 4d9eadaf-f867-4642-bf56-7866858ac8b0. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1634.700277] env[62619]: DEBUG nova.network.neutron [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Updating instance_info_cache with network_info: [{"id": "4d9eadaf-f867-4642-bf56-7866858ac8b0", "address": "fa:16:3e:15:87:9e", "network": {"id": "fa982f07-dafb-40ea-9b20-3b2193167cd2", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-992408249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c0a55c7699c4147a75d6f5f4dfe6e12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d9eadaf-f8", "ovs_interfaceid": "4d9eadaf-f867-4642-bf56-7866858ac8b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1634.735451] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777908, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.839589] env[62619]: DEBUG oslo_vmware.api [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777909, 'name': PowerOffVM_Task, 'duration_secs': 0.269895} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.839589] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1634.839589] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1634.840989] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c9218a2-3937-4061-b590-8e143485eb99 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.071120] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1635.071545] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1635.071545] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Deleting the datastore file [datastore1] dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1635.072377] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48914d5c-7966-40bc-bd11-2ea4d1348700 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.078411] env[62619]: DEBUG oslo_vmware.api [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777910, 'name': PowerOffVM_Task, 'duration_secs': 0.363849} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.079163] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1635.079452] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1635.079624] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09770bb3-8e8b-473c-9732-0341f9cd0cff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.082581] env[62619]: DEBUG oslo_vmware.api [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1635.082581] env[62619]: value = "task-1777912" [ 1635.082581] env[62619]: _type = "Task" [ 1635.082581] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.091375] env[62619]: DEBUG oslo_vmware.api [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777912, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.161021] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1635.161021] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1635.161021] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Deleting the datastore file [datastore1] cef20063-96f0-46cc-9f7d-4436b60216c6 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1635.161021] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6dfa7f9-b891-418f-9c53-05de3df15f3c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.165964] env[62619]: DEBUG oslo_vmware.api [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for the task: (returnval){ [ 1635.165964] env[62619]: value = "task-1777914" [ 1635.165964] env[62619]: _type = "Task" [ 1635.165964] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.176362] env[62619]: DEBUG oslo_vmware.api [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777914, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.204837] env[62619]: DEBUG oslo_concurrency.lockutils [req-6ee5d722-3c3f-4867-873c-a634e284848a req-fc9b7ad2-b01f-4f60-9283-c86df2745334 service nova] Releasing lock "refresh_cache-7ee5f09f-e27b-4373-88ce-8cff2f55a2b9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1635.237020] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777908, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766201} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.237020] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] cbff225f-2d11-4a43-a320-95dd3afb8e48/cbff225f-2d11-4a43-a320-95dd3afb8e48.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1635.237020] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1635.237020] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ecf29618-0151-44ab-ba76-dbeceb81c54d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.243152] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1635.243152] env[62619]: value = "task-1777915" [ 1635.243152] env[62619]: _type = "Task" [ 1635.243152] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.253562] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777915, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.341861] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "11869077-b428-413f-9f8f-7eac08d2d9ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1635.342123] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "11869077-b428-413f-9f8f-7eac08d2d9ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.495114] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb612d62-b9c1-4459-a050-de909ff96b2f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.503215] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523b3163-bb89-485b-aea5-05f3690bb76c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.532717] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f13e8c-e542-48d1-8b7a-327924a36966 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.540162] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babda3b2-7be6-47de-9887-11fab37c852c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.554146] env[62619]: DEBUG nova.compute.provider_tree [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1635.591839] env[62619]: DEBUG oslo_vmware.api [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777912, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253151} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.592203] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1635.592290] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1635.592447] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1635.592615] env[62619]: INFO nova.compute.manager [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Took 1.29 seconds to destroy the instance on the hypervisor. [ 1635.592880] env[62619]: DEBUG oslo.service.loopingcall [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1635.593087] env[62619]: DEBUG nova.compute.manager [-] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1635.593180] env[62619]: DEBUG nova.network.neutron [-] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1635.678112] env[62619]: DEBUG oslo_vmware.api [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Task: {'id': task-1777914, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196214} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.678731] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1635.678731] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1635.678860] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1635.679032] env[62619]: INFO nova.compute.manager [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1635.679269] env[62619]: DEBUG oslo.service.loopingcall [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1635.679452] env[62619]: DEBUG nova.compute.manager [-] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1635.679541] env[62619]: DEBUG nova.network.neutron [-] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1635.753669] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777915, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082287} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.754505] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1635.755321] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ee790c-1490-41cb-9ac8-5bde8855a41e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.777612] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] cbff225f-2d11-4a43-a320-95dd3afb8e48/cbff225f-2d11-4a43-a320-95dd3afb8e48.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1635.779939] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c62c937f-6f4d-47c9-8368-28240bebe4d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.800048] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1635.800048] env[62619]: value = "task-1777916" [ 1635.800048] env[62619]: _type = "Task" [ 1635.800048] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.808058] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777916, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.845187] env[62619]: DEBUG nova.compute.manager [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1635.900036] env[62619]: DEBUG nova.compute.manager [req-1dda953b-705c-4264-b44a-8aed36a970c1 req-6a234177-7d6b-44e8-9dd0-e5a753b61780 service nova] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Received event network-vif-deleted-b1ace9af-97b6-4d21-bbe4-972a2a1c1e13 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1635.900036] env[62619]: INFO nova.compute.manager [req-1dda953b-705c-4264-b44a-8aed36a970c1 req-6a234177-7d6b-44e8-9dd0-e5a753b61780 service nova] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Neutron deleted interface b1ace9af-97b6-4d21-bbe4-972a2a1c1e13; detaching it from the instance and deleting it from the info cache [ 1635.900223] env[62619]: DEBUG nova.network.neutron [req-1dda953b-705c-4264-b44a-8aed36a970c1 req-6a234177-7d6b-44e8-9dd0-e5a753b61780 service nova] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.057664] env[62619]: DEBUG nova.scheduler.client.report [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1636.175114] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "cb33580e-d70d-4557-98fe-e673d93f3307" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.175114] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "cb33580e-d70d-4557-98fe-e673d93f3307" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.312836] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777916, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.368432] env[62619]: DEBUG nova.network.neutron [-] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.370654] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.407269] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-092732ba-710a-42de-976f-83e47326f550 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.415036] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5803b5fa-f3a0-4191-abab-c445766dfe5a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.457083] env[62619]: DEBUG nova.compute.manager [req-1dda953b-705c-4264-b44a-8aed36a970c1 req-6a234177-7d6b-44e8-9dd0-e5a753b61780 service nova] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Detach interface failed, port_id=b1ace9af-97b6-4d21-bbe4-972a2a1c1e13, reason: Instance dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1636.459415] env[62619]: DEBUG nova.compute.manager [req-ef14ac71-bc4d-44b1-803d-86621e5f702b req-b74a33fd-11c4-4820-98d7-d0a8f2fc1478 service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Received event network-vif-deleted-1e21d673-0db1-49d8-b86e-c8b8568b7452 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1636.459616] env[62619]: INFO nova.compute.manager [req-ef14ac71-bc4d-44b1-803d-86621e5f702b req-b74a33fd-11c4-4820-98d7-d0a8f2fc1478 service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Neutron deleted interface 1e21d673-0db1-49d8-b86e-c8b8568b7452; detaching it from the instance and deleting it from the info cache [ 1636.459782] env[62619]: DEBUG nova.network.neutron [req-ef14ac71-bc4d-44b1-803d-86621e5f702b req-b74a33fd-11c4-4820-98d7-d0a8f2fc1478 service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.566556] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.449s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.567091] env[62619]: DEBUG nova.compute.manager [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1636.573022] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.808s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.573022] env[62619]: DEBUG nova.objects.instance [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Lazy-loading 'resources' on Instance uuid 4b2e9965-cbd4-4d98-b003-436b4a8c913e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1636.677641] env[62619]: DEBUG nova.compute.manager [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1636.811578] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777916, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.871950] env[62619]: INFO nova.compute.manager [-] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Took 1.28 seconds to deallocate network for instance. [ 1636.910268] env[62619]: DEBUG nova.network.neutron [-] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.962025] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-820d31e7-6bc5-443f-804b-0577cf99263a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.971208] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee851dd9-6987-428b-8db2-1d03dc0a006e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.010207] env[62619]: DEBUG nova.compute.manager [req-ef14ac71-bc4d-44b1-803d-86621e5f702b req-b74a33fd-11c4-4820-98d7-d0a8f2fc1478 service nova] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Detach interface failed, port_id=1e21d673-0db1-49d8-b86e-c8b8568b7452, reason: Instance cef20063-96f0-46cc-9f7d-4436b60216c6 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1637.075225] env[62619]: DEBUG nova.compute.utils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1637.077063] env[62619]: DEBUG nova.compute.manager [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1637.077063] env[62619]: DEBUG nova.network.neutron [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1637.130752] env[62619]: DEBUG nova.policy [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e9094d6b3854c1184307d9bc35a966e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e11e1bca0c747fd8b4a0ca3e220ba4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1637.198807] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.314521] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777916, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.380443] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.413087] env[62619]: INFO nova.compute.manager [-] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Took 1.73 seconds to deallocate network for instance. 
[ 1637.421972] env[62619]: DEBUG nova.network.neutron [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Successfully created port: a160764e-324a-4c8b-96a5-89e51772aef6 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1637.506844] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c12a2a9-dc6d-4802-ba26-0c337e8ead27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.515098] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be83e191-9f6a-4174-9b23-bfe8a516aeee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.545058] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dbb4d17-31f6-47f7-b568-36a669c51d71 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.552574] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477e4b0c-0974-4480-882a-6f1704d07f44 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.567961] env[62619]: DEBUG nova.compute.provider_tree [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1637.580536] env[62619]: DEBUG nova.compute.manager [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1637.812771] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777916, 'name': ReconfigVM_Task, 'duration_secs': 1.522651} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.813125] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Reconfigured VM instance instance-00000046 to attach disk [datastore1] cbff225f-2d11-4a43-a320-95dd3afb8e48/cbff225f-2d11-4a43-a320-95dd3afb8e48.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1637.813811] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d9800b96-b005-4a99-80fc-986d66bfc3ad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.821124] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1637.821124] env[62619]: value = "task-1777917" [ 1637.821124] env[62619]: _type = "Task" [ 1637.821124] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.827839] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777917, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.926849] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.074255] env[62619]: DEBUG nova.scheduler.client.report [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1638.330263] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777917, 'name': Rename_Task, 'duration_secs': 0.156563} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.330572] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1638.330717] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89c2dafd-f3e2-4246-bf32-6c93101f51b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.337373] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1638.337373] env[62619]: value = "task-1777918" [ 1638.337373] env[62619]: _type = "Task" [ 1638.337373] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.344797] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777918, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.579515] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.010s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.582060] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.219s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.582193] env[62619]: DEBUG nova.objects.instance [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lazy-loading 'resources' on Instance uuid 9014ef05-64d1-4bd6-9f2e-db58003b6520 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1638.592205] env[62619]: DEBUG nova.compute.manager [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1638.608163] env[62619]: INFO nova.scheduler.client.report [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Deleted allocations for instance 4b2e9965-cbd4-4d98-b003-436b4a8c913e [ 1638.620144] env[62619]: DEBUG nova.virt.hardware [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1638.620390] env[62619]: DEBUG nova.virt.hardware [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1638.620545] env[62619]: DEBUG nova.virt.hardware [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1638.620723] env[62619]: DEBUG nova.virt.hardware [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1638.620868] env[62619]: DEBUG nova.virt.hardware [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1638.621067] env[62619]: DEBUG nova.virt.hardware [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1638.621306] env[62619]: DEBUG nova.virt.hardware [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1638.621467] env[62619]: DEBUG nova.virt.hardware [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1638.621629] env[62619]: DEBUG nova.virt.hardware [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1638.621786] env[62619]: DEBUG nova.virt.hardware [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1638.621953] env[62619]: DEBUG nova.virt.hardware [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1638.622829] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940b5c66-9f18-4386-98d0-f144813cf34c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.631329] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439c8c77-c330-4f9b-bf98-b6c05b4790af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.850416] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777918, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.865639] env[62619]: DEBUG nova.compute.manager [req-2f789c5c-85f0-4f3b-841e-e6417ecd8c6b req-fb0deab4-7dfd-4648-9124-21778588535e service nova] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Received event network-vif-plugged-a160764e-324a-4c8b-96a5-89e51772aef6 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1638.865867] env[62619]: DEBUG oslo_concurrency.lockutils [req-2f789c5c-85f0-4f3b-841e-e6417ecd8c6b req-fb0deab4-7dfd-4648-9124-21778588535e service nova] Acquiring lock "fdde42eb-766c-4549-aae5-f7b1a1097cc6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.866090] env[62619]: DEBUG oslo_concurrency.lockutils [req-2f789c5c-85f0-4f3b-841e-e6417ecd8c6b req-fb0deab4-7dfd-4648-9124-21778588535e service nova] Lock "fdde42eb-766c-4549-aae5-f7b1a1097cc6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.866260] env[62619]: DEBUG oslo_concurrency.lockutils [req-2f789c5c-85f0-4f3b-841e-e6417ecd8c6b req-fb0deab4-7dfd-4648-9124-21778588535e service nova] Lock "fdde42eb-766c-4549-aae5-f7b1a1097cc6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.866423] env[62619]: DEBUG nova.compute.manager [req-2f789c5c-85f0-4f3b-841e-e6417ecd8c6b req-fb0deab4-7dfd-4648-9124-21778588535e service nova] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] No waiting events found dispatching network-vif-plugged-a160764e-324a-4c8b-96a5-89e51772aef6 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1638.866582] env[62619]: WARNING nova.compute.manager [req-2f789c5c-85f0-4f3b-841e-e6417ecd8c6b req-fb0deab4-7dfd-4648-9124-21778588535e service nova] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Received unexpected event network-vif-plugged-a160764e-324a-4c8b-96a5-89e51772aef6 for instance with vm_state building and task_state spawning. [ 1638.926223] env[62619]: DEBUG nova.network.neutron [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Successfully updated port: a160764e-324a-4c8b-96a5-89e51772aef6 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1639.116587] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fe8c9823-9db2-41fb-9df8-865d23dbfd83 tempest-AttachInterfacesV270Test-526560893 tempest-AttachInterfacesV270Test-526560893-project-member] Lock "4b2e9965-cbd4-4d98-b003-436b4a8c913e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.974s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1639.348902] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777918, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.402970] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc33321-9c14-4e9c-b6aa-4187aaef74ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.410772] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5713eba1-4f40-4921-8770-e27b15de321c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.440169] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "refresh_cache-fdde42eb-766c-4549-aae5-f7b1a1097cc6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1639.440321] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "refresh_cache-fdde42eb-766c-4549-aae5-f7b1a1097cc6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1639.440470] env[62619]: DEBUG nova.network.neutron [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1639.442312] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfea345-92da-4341-9d2c-bf9d13f2b33a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.450663] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffd0468-9f7f-44a8-9a57-2341e489e9bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.464297] env[62619]: DEBUG nova.compute.provider_tree [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1639.853875] env[62619]: DEBUG oslo_vmware.api [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777918, 'name': PowerOnVM_Task, 'duration_secs': 1.018959} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.854148] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1639.854389] env[62619]: INFO nova.compute.manager [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Took 10.86 seconds to spawn the instance on the hypervisor. [ 1639.854572] env[62619]: DEBUG nova.compute.manager [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1639.855369] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20879869-e6a0-4fa0-9dad-0115234bbbc3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.967235] env[62619]: DEBUG nova.scheduler.client.report [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1639.977494] env[62619]: DEBUG nova.network.neutron [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1640.184682] env[62619]: DEBUG nova.network.neutron [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Updating instance_info_cache with network_info: [{"id": "a160764e-324a-4c8b-96a5-89e51772aef6", "address": "fa:16:3e:45:1b:41", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa160764e-32", "ovs_interfaceid": "a160764e-324a-4c8b-96a5-89e51772aef6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1640.371398] env[62619]: INFO nova.compute.manager [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Took 54.55 seconds to build instance. 
[ 1640.475018] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.891s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.475018] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.882s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.475702] env[62619]: DEBUG nova.objects.instance [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lazy-loading 'resources' on Instance uuid ec56c824-5f9a-47bf-bcd6-e456ddaad2f2 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1640.504238] env[62619]: INFO nova.scheduler.client.report [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Deleted allocations for instance 9014ef05-64d1-4bd6-9f2e-db58003b6520 [ 1640.691272] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "refresh_cache-fdde42eb-766c-4549-aae5-f7b1a1097cc6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1640.691605] env[62619]: DEBUG nova.compute.manager [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Instance network_info: |[{"id": "a160764e-324a-4c8b-96a5-89e51772aef6", "address": "fa:16:3e:45:1b:41", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa160764e-32", "ovs_interfaceid": "a160764e-324a-4c8b-96a5-89e51772aef6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1640.692049] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 
fdde42eb-766c-4549-aae5-f7b1a1097cc6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:1b:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '950a2f67-7668-4376-9d48-b38dca033c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a160764e-324a-4c8b-96a5-89e51772aef6', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1640.699823] env[62619]: DEBUG oslo.service.loopingcall [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1640.700062] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1640.700295] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a067da12-a755-457a-bc27-1f7fc6d7e212 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.723162] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1640.723162] env[62619]: value = "task-1777919" [ 1640.723162] env[62619]: _type = "Task" [ 1640.723162] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.735797] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777919, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.873475] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f24b562e-4f03-44e8-87d1-f18dded00fe9 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "cbff225f-2d11-4a43-a320-95dd3afb8e48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.181s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.013658] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e1eaa2b6-098f-427b-8d7f-b81eaff7196e tempest-SecurityGroupsTestJSON-1783304287 tempest-SecurityGroupsTestJSON-1783304287-project-member] Lock "9014ef05-64d1-4bd6-9f2e-db58003b6520" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.117s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.154421] env[62619]: DEBUG nova.compute.manager [req-4cc23436-7afa-45a8-ac72-3e2f58280ea5 req-c61621f6-086f-4d1b-bdf7-364b478d5f91 service nova] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Received event network-changed-a160764e-324a-4c8b-96a5-89e51772aef6 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1641.154732] env[62619]: DEBUG nova.compute.manager [req-4cc23436-7afa-45a8-ac72-3e2f58280ea5 req-c61621f6-086f-4d1b-bdf7-364b478d5f91 service nova] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Refreshing instance network info cache due to event network-changed-a160764e-324a-4c8b-96a5-89e51772aef6. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1641.154732] env[62619]: DEBUG oslo_concurrency.lockutils [req-4cc23436-7afa-45a8-ac72-3e2f58280ea5 req-c61621f6-086f-4d1b-bdf7-364b478d5f91 service nova] Acquiring lock "refresh_cache-fdde42eb-766c-4549-aae5-f7b1a1097cc6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.154995] env[62619]: DEBUG oslo_concurrency.lockutils [req-4cc23436-7afa-45a8-ac72-3e2f58280ea5 req-c61621f6-086f-4d1b-bdf7-364b478d5f91 service nova] Acquired lock "refresh_cache-fdde42eb-766c-4549-aae5-f7b1a1097cc6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.156228] env[62619]: DEBUG nova.network.neutron [req-4cc23436-7afa-45a8-ac72-3e2f58280ea5 req-c61621f6-086f-4d1b-bdf7-364b478d5f91 service nova] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Refreshing network info cache for port a160764e-324a-4c8b-96a5-89e51772aef6 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1641.234034] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777919, 'name': CreateVM_Task, 'duration_secs': 0.396567} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.236539] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1641.237920] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.237920] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.238256] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1641.241047] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3873d983-c941-4c74-a76a-ceb6848cc1f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.243538] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1641.243538] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c62644-62e6-702c-8c4a-3af67681bc87" [ 1641.243538] env[62619]: _type = "Task" [ 1641.243538] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.254118] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c62644-62e6-702c-8c4a-3af67681bc87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.370895] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14878130-a671-4535-8eb6-af9a2bbfb6da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.378179] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be666b5-6233-4fcd-b989-28cba8a8ffe1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.420717] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf3651e-7e45-4c3d-8152-ce9596af0e68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.431509] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca875a7-c450-4fd5-8ce2-7b95ca860f80 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.446506] env[62619]: DEBUG nova.compute.provider_tree [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1641.756398] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c62644-62e6-702c-8c4a-3af67681bc87, 'name': SearchDatastore_Task, 'duration_secs': 0.017414} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.756729] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.756970] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1641.757206] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.757351] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.757534] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1641.758521] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d199f216-f6e4-4a38-9417-4af61c250a61 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.767022] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1641.767308] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1641.767968] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06a8159e-56e8-488c-a6de-eb8ea418ad9d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.776498] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1641.776498] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b9b0d0-9d05-40a8-ce5b-bc7876439055" [ 1641.776498] env[62619]: _type = "Task" [ 1641.776498] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.785780] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b9b0d0-9d05-40a8-ce5b-bc7876439055, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.949982] env[62619]: DEBUG nova.scheduler.client.report [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1642.009164] env[62619]: DEBUG nova.network.neutron [req-4cc23436-7afa-45a8-ac72-3e2f58280ea5 req-c61621f6-086f-4d1b-bdf7-364b478d5f91 service nova] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Updated VIF entry in instance network info cache for port a160764e-324a-4c8b-96a5-89e51772aef6. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1642.009563] env[62619]: DEBUG nova.network.neutron [req-4cc23436-7afa-45a8-ac72-3e2f58280ea5 req-c61621f6-086f-4d1b-bdf7-364b478d5f91 service nova] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Updating instance_info_cache with network_info: [{"id": "a160764e-324a-4c8b-96a5-89e51772aef6", "address": "fa:16:3e:45:1b:41", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa160764e-32", "ovs_interfaceid": "a160764e-324a-4c8b-96a5-89e51772aef6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1642.288478] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b9b0d0-9d05-40a8-ce5b-bc7876439055, 'name': SearchDatastore_Task, 'duration_secs': 0.018239} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.289068] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed395266-7bfa-4be3-be5c-3be51111e32b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.294860] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1642.294860] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e1fc56-7032-fa1b-398f-efbd35f0b6ce" [ 1642.294860] env[62619]: _type = "Task" [ 1642.294860] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.304492] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e1fc56-7032-fa1b-398f-efbd35f0b6ce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.457777] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.982s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.460797] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.337s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.461702] env[62619]: INFO nova.compute.claims [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1642.490117] env[62619]: INFO nova.scheduler.client.report [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleted allocations for instance ec56c824-5f9a-47bf-bcd6-e456ddaad2f2 [ 1642.513501] env[62619]: DEBUG oslo_concurrency.lockutils [req-4cc23436-7afa-45a8-ac72-3e2f58280ea5 req-c61621f6-086f-4d1b-bdf7-364b478d5f91 service nova] Releasing lock "refresh_cache-fdde42eb-766c-4549-aae5-f7b1a1097cc6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.807134] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e1fc56-7032-fa1b-398f-efbd35f0b6ce, 'name': SearchDatastore_Task, 'duration_secs': 0.024019} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.807432] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.807976] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] fdde42eb-766c-4549-aae5-f7b1a1097cc6/fdde42eb-766c-4549-aae5-f7b1a1097cc6.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1642.808260] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f00fd12-6aa4-4d07-89e0-977fe894125c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.816021] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1642.816021] env[62619]: value = "task-1777920" [ 1642.816021] env[62619]: _type = "Task" [ 1642.816021] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.825042] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777920, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.887390] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquiring lock "8c296f2c-3e47-4431-b0c0-f7f1706c4a12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.887631] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Lock "8c296f2c-3e47-4431-b0c0-f7f1706c4a12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.000053] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4cc80622-3da9-4138-b88a-dd71ad5e6941 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "ec56c824-5f9a-47bf-bcd6-e456ddaad2f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.666s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.283947] env[62619]: DEBUG nova.compute.manager [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Stashing vm_state: active {{(pid=62619) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1643.326831] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777920, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.390018] env[62619]: DEBUG nova.compute.manager [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1643.805097] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.828492] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777920, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.006526} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.828839] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] fdde42eb-766c-4549-aae5-f7b1a1097cc6/fdde42eb-766c-4549-aae5-f7b1a1097cc6.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1643.829128] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1643.829403] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a065d048-8905-4f4d-9204-bd3f2a3eb236 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.839131] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1643.839131] env[62619]: value = "task-1777921" [ 1643.839131] env[62619]: _type = "Task" [ 1643.839131] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.851153] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777921, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.892359] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6aeba2-7e90-425a-9e44-71004437c1eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.903302] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a383930-0d76-4d85-a7b8-a1f6eb68acbb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.934309] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.935238] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5e1fe0-9dfb-4234-8253-0d91e9622fd5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.942791] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98be225b-4a4b-4864-a713-6dbe6c2bd854 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.956384] env[62619]: DEBUG nova.compute.provider_tree [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1643.962852] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "eca829be-d425-4668-9ebd-1247c5ff19d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.962852] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "eca829be-d425-4668-9ebd-1247c5ff19d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.963078] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "eca829be-d425-4668-9ebd-1247c5ff19d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.963267] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "eca829be-d425-4668-9ebd-1247c5ff19d0-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.963449] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "eca829be-d425-4668-9ebd-1247c5ff19d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.966074] env[62619]: INFO nova.compute.manager [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Terminating instance [ 1644.351464] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777921, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058245} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.351555] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1644.352393] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87cd859-ec80-498d-b78a-80a32dde8451 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.376783] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] fdde42eb-766c-4549-aae5-f7b1a1097cc6/fdde42eb-766c-4549-aae5-f7b1a1097cc6.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1644.377132] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48c49102-e7a5-495e-8600-2977efdb933a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.397906] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1644.397906] env[62619]: value = "task-1777922" [ 1644.397906] env[62619]: _type = "Task" [ 1644.397906] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.409052] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777922, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.462587] env[62619]: DEBUG nova.scheduler.client.report [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1644.469889] env[62619]: DEBUG nova.compute.manager [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1644.470769] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1644.473089] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711366ab-6723-41e4-9376-a8490f4a947a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.488381] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1644.489663] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-030425b0-8b2a-4432-ac41-8988b2cf76c2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.496577] env[62619]: DEBUG oslo_vmware.api [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1644.496577] env[62619]: value = "task-1777923" [ 1644.496577] env[62619]: _type = "Task" [ 1644.496577] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.505883] env[62619]: DEBUG oslo_vmware.api [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777923, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.525018] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "b3d9c418-f521-4770-a381-5238be6cc33c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.525313] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "b3d9c418-f521-4770-a381-5238be6cc33c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.908628] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777922, 'name': ReconfigVM_Task, 'duration_secs': 0.319212} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.908989] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Reconfigured VM instance instance-00000047 to attach disk [datastore1] fdde42eb-766c-4549-aae5-f7b1a1097cc6/fdde42eb-766c-4549-aae5-f7b1a1097cc6.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1644.909470] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6f17c1b-35ec-4c39-8ae2-08e8dd2a93e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.915154] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1644.915154] env[62619]: value = "task-1777924" [ 1644.915154] env[62619]: _type = "Task" [ 1644.915154] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.922557] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777924, 'name': Rename_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.966441] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.506s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.967095] env[62619]: DEBUG nova.compute.manager [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1644.969774] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.151s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.970102] env[62619]: DEBUG nova.objects.instance [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lazy-loading 'resources' on Instance uuid b6aae13f-0711-4421-9d55-de7ece3e4b89 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1645.009998] env[62619]: DEBUG oslo_vmware.api [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777923, 'name': PowerOffVM_Task, 'duration_secs': 0.440028} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.011426] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1645.011426] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1645.011426] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-569c7375-39de-452a-b15b-54d02a2ccbc9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.028856] env[62619]: DEBUG nova.compute.manager [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1645.103106] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1645.103364] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1645.103579] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleting the datastore file [datastore1] eca829be-d425-4668-9ebd-1247c5ff19d0 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1645.103851] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8931ec71-4c72-4831-9aa9-5519fbc0a33a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.110904] env[62619]: DEBUG oslo_vmware.api [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1645.110904] env[62619]: value = "task-1777926" [ 1645.110904] env[62619]: _type = "Task" [ 1645.110904] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.118968] env[62619]: DEBUG oslo_vmware.api [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777926, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.426713] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777924, 'name': Rename_Task, 'duration_secs': 0.27027} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.427140] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1645.427427] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-29c84bb1-5e9a-46de-afe5-9739b84365f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.435426] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1645.435426] env[62619]: value = "task-1777927" [ 1645.435426] env[62619]: _type = "Task" [ 1645.435426] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.445631] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777927, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.473489] env[62619]: DEBUG nova.compute.utils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1645.484886] env[62619]: DEBUG nova.compute.manager [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1645.484886] env[62619]: DEBUG nova.network.neutron [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1645.546751] env[62619]: DEBUG nova.policy [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd106b23f779045f788b2536afd8c623d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2377a52a195d4f0b9181207ab5741734', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1645.577758] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1645.623878] env[62619]: DEBUG oslo_vmware.api [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1777926, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.392151} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.624056] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1645.624212] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1645.624424] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1645.624624] env[62619]: INFO nova.compute.manager [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1645.624904] env[62619]: DEBUG oslo.service.loopingcall [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1645.627610] env[62619]: DEBUG nova.compute.manager [-] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1645.627677] env[62619]: DEBUG nova.network.neutron [-] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1645.946482] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777927, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.985029] env[62619]: DEBUG nova.compute.manager [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1645.994722] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c280416-460b-41d7-9530-2e9bf98c3c0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.005278] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50baf2e8-11da-4652-9831-c5571e11f6b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.039667] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69bc931a-152b-4e64-bf6b-acb840356972 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.048921] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f733e72-9a75-4fbc-beaf-3651817ed484 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.065372] env[62619]: DEBUG nova.compute.provider_tree [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1646.079484] env[62619]: DEBUG nova.compute.manager [req-f9430a71-884b-4e84-aac0-2f563cede005 req-4dd71a44-7140-49d6-a089-938c2633ab74 service nova] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Received event network-vif-deleted-19adac57-34a5-41ed-8245-9f4cef383981 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1646.079484] env[62619]: INFO nova.compute.manager [req-f9430a71-884b-4e84-aac0-2f563cede005 req-4dd71a44-7140-49d6-a089-938c2633ab74 service nova] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Neutron deleted interface 19adac57-34a5-41ed-8245-9f4cef383981; detaching it from the instance and deleting it from the info cache [ 1646.079484] env[62619]: DEBUG nova.network.neutron [req-f9430a71-884b-4e84-aac0-2f563cede005 req-4dd71a44-7140-49d6-a089-938c2633ab74 service nova] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1646.189877] env[62619]: DEBUG nova.network.neutron [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Successfully created port: 8ba2df09-8b89-4f62-a33e-49835e08ced1 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1646.454372] env[62619]: DEBUG oslo_vmware.api [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777927, 'name': PowerOnVM_Task, 'duration_secs': 0.679803} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.455133] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1646.455133] env[62619]: INFO nova.compute.manager [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Took 7.86 seconds to spawn the instance on the hypervisor. [ 1646.457880] env[62619]: DEBUG nova.compute.manager [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1646.458824] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59849fef-a6f1-449e-bee1-5120f7ce3fb2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.501675] env[62619]: DEBUG nova.network.neutron [-] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1646.568778] env[62619]: DEBUG nova.scheduler.client.report [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1646.584716] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c47cb3cf-cdff-46f2-8071-303f71c65b75 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.598040] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2d7f8d-4035-4719-b71e-7ec74403553d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.636029] env[62619]: DEBUG nova.compute.manager [req-f9430a71-884b-4e84-aac0-2f563cede005 req-4dd71a44-7140-49d6-a089-938c2633ab74 service nova] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Detach interface failed, port_id=19adac57-34a5-41ed-8245-9f4cef383981, reason: Instance eca829be-d425-4668-9ebd-1247c5ff19d0 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1646.985052] env[62619]: INFO nova.compute.manager [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Took 44.91 seconds to build instance. [ 1647.000474] env[62619]: DEBUG nova.compute.manager [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1647.007044] env[62619]: INFO nova.compute.manager [-] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Took 1.38 seconds to deallocate network for instance. [ 1647.034532] env[62619]: DEBUG nova.virt.hardware [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1647.034679] env[62619]: DEBUG nova.virt.hardware [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1647.037299] env[62619]: DEBUG nova.virt.hardware [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1647.037299] env[62619]: DEBUG nova.virt.hardware [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1647.037299] env[62619]: DEBUG nova.virt.hardware [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1647.037299] env[62619]: DEBUG nova.virt.hardware [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1647.037299] env[62619]: DEBUG nova.virt.hardware [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1647.037299] env[62619]: DEBUG nova.virt.hardware [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1647.037299] env[62619]: DEBUG nova.virt.hardware [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1647.037299] env[62619]: DEBUG nova.virt.hardware [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1647.037299] env[62619]: DEBUG nova.virt.hardware [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1647.037906] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57f6b7f-099d-4802-bfca-1153714a9f31 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.051027] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eeffb26-382b-4737-abfb-3a600fcab810 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.075686] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.106s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.079594] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.152s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.081262] env[62619]: INFO nova.compute.claims [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1647.105129] 
env[62619]: INFO nova.scheduler.client.report [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleted allocations for instance b6aae13f-0711-4421-9d55-de7ece3e4b89 [ 1647.491018] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9d3dab57-05bc-44a8-a5f9-98c3d24916e8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "fdde42eb-766c-4549-aae5-f7b1a1097cc6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.426s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.515046] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.615310] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8710244-ede9-416d-b00e-1177c89b2365 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "b6aae13f-0711-4421-9d55-de7ece3e4b89" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.401s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.697149] env[62619]: DEBUG nova.compute.manager [req-d74376f6-0fda-4edb-ad8e-1e0774c04939 req-941754bc-e43b-4106-933e-5b0bdd65e635 service nova] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Received event network-vif-plugged-8ba2df09-8b89-4f62-a33e-49835e08ced1 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1647.697149] env[62619]: DEBUG oslo_concurrency.lockutils [req-d74376f6-0fda-4edb-ad8e-1e0774c04939 req-941754bc-e43b-4106-933e-5b0bdd65e635 service nova] Acquiring lock "1257a23e-3beb-4357-9322-4b84c87d0c35-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.697149] env[62619]: DEBUG oslo_concurrency.lockutils [req-d74376f6-0fda-4edb-ad8e-1e0774c04939 req-941754bc-e43b-4106-933e-5b0bdd65e635 service nova] Lock "1257a23e-3beb-4357-9322-4b84c87d0c35-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.697149] env[62619]: DEBUG oslo_concurrency.lockutils [req-d74376f6-0fda-4edb-ad8e-1e0774c04939 req-941754bc-e43b-4106-933e-5b0bdd65e635 service nova] Lock "1257a23e-3beb-4357-9322-4b84c87d0c35-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.697149] env[62619]: DEBUG nova.compute.manager [req-d74376f6-0fda-4edb-ad8e-1e0774c04939 req-941754bc-e43b-4106-933e-5b0bdd65e635 service nova] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] No waiting events found dispatching network-vif-plugged-8ba2df09-8b89-4f62-a33e-49835e08ced1 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1647.697149] 
env[62619]: WARNING nova.compute.manager [req-d74376f6-0fda-4edb-ad8e-1e0774c04939 req-941754bc-e43b-4106-933e-5b0bdd65e635 service nova] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Received unexpected event network-vif-plugged-8ba2df09-8b89-4f62-a33e-49835e08ced1 for instance with vm_state building and task_state spawning. [ 1647.759753] env[62619]: DEBUG nova.network.neutron [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Successfully updated port: 8ba2df09-8b89-4f62-a33e-49835e08ced1 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1647.772659] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "fdde42eb-766c-4549-aae5-f7b1a1097cc6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.772922] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "fdde42eb-766c-4549-aae5-f7b1a1097cc6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.773144] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "fdde42eb-766c-4549-aae5-f7b1a1097cc6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.773326] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "fdde42eb-766c-4549-aae5-f7b1a1097cc6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.773495] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "fdde42eb-766c-4549-aae5-f7b1a1097cc6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.776315] env[62619]: INFO nova.compute.manager [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Terminating instance [ 1648.137867] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" 
{{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1648.138208] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1648.263374] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "refresh_cache-1257a23e-3beb-4357-9322-4b84c87d0c35" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1648.263521] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "refresh_cache-1257a23e-3beb-4357-9322-4b84c87d0c35" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1648.263665] env[62619]: DEBUG nova.network.neutron [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1648.279919] env[62619]: DEBUG nova.compute.manager [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1648.279919] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1648.280729] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4310933d-e88f-42f2-b7e3-7b25e19e4344 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.290482] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1648.290728] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8209751-b786-4ca7-bce7-b46562a99fb1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.293607] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "4763e489-5aeb-4dc0-b327-b79a55afdfe3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1648.293929] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "4763e489-5aeb-4dc0-b327-b79a55afdfe3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1648.294037] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "4763e489-5aeb-4dc0-b327-b79a55afdfe3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1648.294198] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "4763e489-5aeb-4dc0-b327-b79a55afdfe3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1648.294354] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "4763e489-5aeb-4dc0-b327-b79a55afdfe3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1648.299169] env[62619]: DEBUG oslo_vmware.api [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1648.299169] env[62619]: value = "task-1777928" [ 1648.299169] env[62619]: _type = "Task" [ 1648.299169] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.299715] env[62619]: INFO nova.compute.manager [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Terminating instance [ 1648.309516] env[62619]: DEBUG oslo_vmware.api [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777928, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.441972] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345ea18d-ef6d-40c6-9d7d-e32e209b369e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.449814] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9543c1-0b47-46e6-ba9a-da46850d2792 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.481377] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9b086d-7e94-4a13-a397-160dcf51f0e2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.489344] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1801a93c-c60f-4a83-819b-e4a797dd4c85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.503667] env[62619]: DEBUG nova.compute.provider_tree [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1648.640386] env[62619]: DEBUG nova.compute.manager [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1648.794558] env[62619]: DEBUG nova.network.neutron [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1648.805607] env[62619]: DEBUG nova.compute.manager [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1648.805852] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1648.806630] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b25b493-363a-419d-9362-937d4a1ce350 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.814229] env[62619]: DEBUG oslo_vmware.api [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777928, 'name': PowerOffVM_Task, 'duration_secs': 0.177106} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.814874] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1648.815090] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1648.815617] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4d19997-c5b2-424f-9fd9-dfb0a9831183 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.818983] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1648.819528] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ffa8d9ef-7ce3-4ea5-97c1-2b94a063ccbb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.825855] env[62619]: DEBUG oslo_vmware.api [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1648.825855] env[62619]: value = "task-1777930" [ 1648.825855] env[62619]: _type = "Task" [ 1648.825855] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.835306] env[62619]: DEBUG oslo_vmware.api [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777930, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.968276] env[62619]: DEBUG nova.network.neutron [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Updating instance_info_cache with network_info: [{"id": "8ba2df09-8b89-4f62-a33e-49835e08ced1", "address": "fa:16:3e:d1:3a:af", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba2df09-8b", "ovs_interfaceid": "8ba2df09-8b89-4f62-a33e-49835e08ced1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1649.010026] env[62619]: DEBUG nova.scheduler.client.report [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1649.017289] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1649.017663] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1649.017966] env[62619]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleting the datastore file [datastore1] fdde42eb-766c-4549-aae5-f7b1a1097cc6 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1649.018341] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a356d2f3-b8af-4d69-8384-b569caa6543c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.025483] env[62619]: DEBUG oslo_vmware.api [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1649.025483] env[62619]: value = "task-1777931" [ 1649.025483] env[62619]: _type = "Task" [ 1649.025483] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.035622] env[62619]: DEBUG oslo_vmware.api [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777931, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.165114] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.334797] env[62619]: DEBUG oslo_vmware.api [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777930, 'name': PowerOffVM_Task, 'duration_secs': 0.357866} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.335117] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1649.335292] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1649.335532] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8280292d-b631-4a67-9bb2-de6bbac2233c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.471982] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "refresh_cache-1257a23e-3beb-4357-9322-4b84c87d0c35" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1649.472337] env[62619]: DEBUG nova.compute.manager [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Instance network_info: |[{"id": "8ba2df09-8b89-4f62-a33e-49835e08ced1", "address": "fa:16:3e:d1:3a:af", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba2df09-8b", "ovs_interfaceid": "8ba2df09-8b89-4f62-a33e-49835e08ced1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1649.472772] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:3a:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ba2df09-8b89-4f62-a33e-49835e08ced1', 'vif_model': 
'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1649.480824] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Creating folder: Project (2377a52a195d4f0b9181207ab5741734). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1649.481106] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32349862-59cd-4beb-9ab9-9e1a3db69ee3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.492911] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Created folder: Project (2377a52a195d4f0b9181207ab5741734) in parent group-v368875. [ 1649.493107] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Creating folder: Instances. Parent ref: group-v369071. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1649.493323] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-30476127-cc41-492b-b840-d9214b53cdb2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.501549] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Created folder: Instances in parent group-v369071. [ 1649.501763] env[62619]: DEBUG oslo.service.loopingcall [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1649.501936] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1649.502137] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-180697c3-8237-49c6-aa8a-37b1df2f1368 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.516081] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.438s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.516549] env[62619]: DEBUG nova.compute.manager [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1649.520190] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 33.538s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1649.528057] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1649.528057] env[62619]: value = "task-1777935" [ 1649.528057] env[62619]: _type = "Task" [ 1649.528057] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.547905] env[62619]: DEBUG oslo_vmware.api [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1777931, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188602} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.548225] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777935, 'name': CreateVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.548600] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1649.548884] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1649.549181] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1649.549480] env[62619]: INFO nova.compute.manager [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1649.549848] env[62619]: DEBUG oslo.service.loopingcall [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1649.550151] env[62619]: DEBUG nova.compute.manager [-] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1649.550314] env[62619]: DEBUG nova.network.neutron [-] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1649.676720] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1649.676955] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1649.677159] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleting the datastore file [datastore1] 4763e489-5aeb-4dc0-b327-b79a55afdfe3 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1649.677446] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a7f5ff1-172f-497a-ab6c-f42fc0bed22c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.683981] env[62619]: DEBUG oslo_vmware.api [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1649.683981] env[62619]: value = "task-1777936" [ 1649.683981] env[62619]: _type = "Task" [ 1649.683981] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.692521] env[62619]: DEBUG oslo_vmware.api [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777936, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.725146] env[62619]: DEBUG nova.compute.manager [req-66aef19a-6eb5-47a8-abb1-c154d0bcba44 req-4ed3a6d8-08d5-43cd-b440-14d355ab55f2 service nova] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Received event network-changed-8ba2df09-8b89-4f62-a33e-49835e08ced1 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1649.725402] env[62619]: DEBUG nova.compute.manager [req-66aef19a-6eb5-47a8-abb1-c154d0bcba44 req-4ed3a6d8-08d5-43cd-b440-14d355ab55f2 service nova] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Refreshing instance network info cache due to event network-changed-8ba2df09-8b89-4f62-a33e-49835e08ced1. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1649.725673] env[62619]: DEBUG oslo_concurrency.lockutils [req-66aef19a-6eb5-47a8-abb1-c154d0bcba44 req-4ed3a6d8-08d5-43cd-b440-14d355ab55f2 service nova] Acquiring lock "refresh_cache-1257a23e-3beb-4357-9322-4b84c87d0c35" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.725874] env[62619]: DEBUG oslo_concurrency.lockutils [req-66aef19a-6eb5-47a8-abb1-c154d0bcba44 req-4ed3a6d8-08d5-43cd-b440-14d355ab55f2 service nova] Acquired lock "refresh_cache-1257a23e-3beb-4357-9322-4b84c87d0c35" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.726103] env[62619]: DEBUG nova.network.neutron [req-66aef19a-6eb5-47a8-abb1-c154d0bcba44 req-4ed3a6d8-08d5-43cd-b440-14d355ab55f2 service nova] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Refreshing network info cache for port 8ba2df09-8b89-4f62-a33e-49835e08ced1 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1650.022210] env[62619]: DEBUG nova.compute.utils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1650.024348] env[62619]: DEBUG nova.compute.manager [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1650.024603] env[62619]: DEBUG nova.network.neutron [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1650.028533] env[62619]: INFO nova.compute.claims [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1650.042955] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777935, 'name': CreateVM_Task, 'duration_secs': 0.445944} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.044108] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1650.044108] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1650.044108] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1650.044343] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1650.044683] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3d3f753-3784-4535-88ca-79f80d70d615 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.050656] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1650.050656] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c10567-9239-388e-d883-afb26abf8a77" [ 1650.050656] env[62619]: _type = "Task" [ 1650.050656] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.059127] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c10567-9239-388e-d883-afb26abf8a77, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.078724] env[62619]: DEBUG nova.policy [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bce0b3b51e01431a846acf16541fef31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b7e3c16dc9044a7ea891b555675de9ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1650.194642] env[62619]: DEBUG oslo_vmware.api [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777936, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213025} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.194642] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1650.194976] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1650.194976] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1650.195055] env[62619]: INFO nova.compute.manager [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Took 1.39 seconds to destroy the instance on the hypervisor. [ 1650.195302] env[62619]: DEBUG oslo.service.loopingcall [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1650.195486] env[62619]: DEBUG nova.compute.manager [-] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1650.195577] env[62619]: DEBUG nova.network.neutron [-] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1650.290495] env[62619]: DEBUG nova.network.neutron [-] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1650.330044] env[62619]: DEBUG nova.network.neutron [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Successfully created port: 3a6e75b1-5f37-429c-894d-3c696d3a1b44 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1650.533592] env[62619]: DEBUG nova.compute.manager [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1650.538990] env[62619]: INFO nova.compute.resource_tracker [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating resource usage from migration 7a58e51e-e1cb-4fe5-a12d-73e2a613ed67 [ 1650.563044] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c10567-9239-388e-d883-afb26abf8a77, 'name': SearchDatastore_Task, 'duration_secs': 0.011308} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.565731] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.565962] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1650.568095] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1650.568095] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1650.568095] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1650.568095] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77473599-a31d-4731-9e71-7d04c84d6980 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.578038] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1650.578038] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1650.578038] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0fe83d6-0209-495c-be30-08e2e755551a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.583869] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1650.583869] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52183c21-e999-e892-c448-31cd351aca58" [ 1650.583869] env[62619]: _type = "Task" [ 1650.583869] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.591828] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52183c21-e999-e892-c448-31cd351aca58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.628771] env[62619]: DEBUG nova.compute.manager [req-03d7f7d6-bf37-4bab-9a5c-3fc8518d404d req-18445271-3fb5-4145-9448-79ff33e2c260 service nova] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Received event network-vif-deleted-b80ccdea-ed2b-4257-8c43-ae663d8b8bbc {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1650.628969] env[62619]: INFO nova.compute.manager [req-03d7f7d6-bf37-4bab-9a5c-3fc8518d404d req-18445271-3fb5-4145-9448-79ff33e2c260 service nova] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Neutron deleted interface b80ccdea-ed2b-4257-8c43-ae663d8b8bbc; detaching it from the instance and deleting it from the info cache [ 1650.629153] env[62619]: DEBUG nova.network.neutron [req-03d7f7d6-bf37-4bab-9a5c-3fc8518d404d req-18445271-3fb5-4145-9448-79ff33e2c260 service nova] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1650.631295] env[62619]: DEBUG nova.network.neutron [req-66aef19a-6eb5-47a8-abb1-c154d0bcba44 req-4ed3a6d8-08d5-43cd-b440-14d355ab55f2 service nova] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Updated VIF entry in instance network info cache for port 8ba2df09-8b89-4f62-a33e-49835e08ced1. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1650.631662] env[62619]: DEBUG nova.network.neutron [req-66aef19a-6eb5-47a8-abb1-c154d0bcba44 req-4ed3a6d8-08d5-43cd-b440-14d355ab55f2 service nova] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Updating instance_info_cache with network_info: [{"id": "8ba2df09-8b89-4f62-a33e-49835e08ced1", "address": "fa:16:3e:d1:3a:af", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba2df09-8b", "ovs_interfaceid": "8ba2df09-8b89-4f62-a33e-49835e08ced1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1650.792387] env[62619]: INFO nova.compute.manager [-] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Took 1.24 seconds to deallocate network for instance. [ 1650.879422] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c3b107-ca96-4676-9c55-13f83d40c897 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.887880] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de65fb28-a6a5-4d71-91dc-a74ff2226710 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.917845] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169ac3dc-2279-423a-b9ee-4c7dede32965 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.926027] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80065b02-8996-4c6b-b0d4-9c910b834e9f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.940069] env[62619]: DEBUG nova.compute.provider_tree [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1651.094282] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52183c21-e999-e892-c448-31cd351aca58, 'name': 
SearchDatastore_Task, 'duration_secs': 0.014345} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.095107] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a39259c-278e-43f0-9f69-c8db334040b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.100849] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1651.100849] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ec73f5-bb7c-c91f-1de8-08e6d99402be" [ 1651.100849] env[62619]: _type = "Task" [ 1651.100849] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.108628] env[62619]: DEBUG nova.network.neutron [-] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1651.109743] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ec73f5-bb7c-c91f-1de8-08e6d99402be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.136732] env[62619]: DEBUG oslo_concurrency.lockutils [req-66aef19a-6eb5-47a8-abb1-c154d0bcba44 req-4ed3a6d8-08d5-43cd-b440-14d355ab55f2 service nova] Releasing lock "refresh_cache-1257a23e-3beb-4357-9322-4b84c87d0c35" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1651.137099] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-add85b30-1e6f-4946-a2b5-5b59e78b3a25 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.146989] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5547dbf-11f4-4d7b-b07c-73051fcb9aca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.176879] env[62619]: DEBUG nova.compute.manager [req-03d7f7d6-bf37-4bab-9a5c-3fc8518d404d req-18445271-3fb5-4145-9448-79ff33e2c260 service nova] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Detach interface failed, port_id=b80ccdea-ed2b-4257-8c43-ae663d8b8bbc, reason: Instance 4763e489-5aeb-4dc0-b327-b79a55afdfe3 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1651.298195] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.443261] env[62619]: DEBUG nova.scheduler.client.report [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1651.543322] env[62619]: DEBUG nova.compute.manager [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1651.569272] env[62619]: DEBUG nova.virt.hardware [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1651.569516] env[62619]: DEBUG nova.virt.hardware [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1651.569668] env[62619]: DEBUG nova.virt.hardware [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1651.569845] env[62619]: DEBUG nova.virt.hardware [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 
tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1651.569988] env[62619]: DEBUG nova.virt.hardware [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1651.570165] env[62619]: DEBUG nova.virt.hardware [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1651.570374] env[62619]: DEBUG nova.virt.hardware [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1651.570530] env[62619]: DEBUG nova.virt.hardware [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1651.570692] env[62619]: DEBUG nova.virt.hardware [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1651.570850] env[62619]: DEBUG nova.virt.hardware [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1651.571029] env[62619]: DEBUG nova.virt.hardware [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1651.571962] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0276cf9-9229-498f-85cf-cf759bb790a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.579765] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1422d7-fa85-4998-82c1-719528e8b9a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.610413] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ec73f5-bb7c-c91f-1de8-08e6d99402be, 
'name': SearchDatastore_Task, 'duration_secs': 0.018892} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.610713] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1651.610987] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 1257a23e-3beb-4357-9322-4b84c87d0c35/1257a23e-3beb-4357-9322-4b84c87d0c35.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1651.611261] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa12dc3a-739b-4fae-a38e-e3b56943f4f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.613900] env[62619]: INFO nova.compute.manager [-] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Took 1.42 seconds to deallocate network for instance. [ 1651.619407] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1651.619407] env[62619]: value = "task-1777937" [ 1651.619407] env[62619]: _type = "Task" [ 1651.619407] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.627310] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1777937, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.788175] env[62619]: DEBUG nova.compute.manager [req-074470af-cb27-4e33-a4a0-410318ec8cbf req-b0bb166d-50a8-47e3-83f7-85590c9d885e service nova] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Received event network-vif-deleted-a160764e-324a-4c8b-96a5-89e51772aef6 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1651.788495] env[62619]: DEBUG nova.compute.manager [req-074470af-cb27-4e33-a4a0-410318ec8cbf req-b0bb166d-50a8-47e3-83f7-85590c9d885e service nova] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Received event network-vif-plugged-3a6e75b1-5f37-429c-894d-3c696d3a1b44 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1651.788676] env[62619]: DEBUG oslo_concurrency.lockutils [req-074470af-cb27-4e33-a4a0-410318ec8cbf req-b0bb166d-50a8-47e3-83f7-85590c9d885e service nova] Acquiring lock "d0258646-e687-4198-b7c8-7bd116e3bf18-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.788886] env[62619]: DEBUG oslo_concurrency.lockutils [req-074470af-cb27-4e33-a4a0-410318ec8cbf req-b0bb166d-50a8-47e3-83f7-85590c9d885e service nova] Lock "d0258646-e687-4198-b7c8-7bd116e3bf18-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.789148] env[62619]: DEBUG oslo_concurrency.lockutils [req-074470af-cb27-4e33-a4a0-410318ec8cbf req-b0bb166d-50a8-47e3-83f7-85590c9d885e service nova] Lock "d0258646-e687-4198-b7c8-7bd116e3bf18-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.789341] env[62619]: DEBUG nova.compute.manager [req-074470af-cb27-4e33-a4a0-410318ec8cbf req-b0bb166d-50a8-47e3-83f7-85590c9d885e service nova] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] No waiting events found dispatching network-vif-plugged-3a6e75b1-5f37-429c-894d-3c696d3a1b44 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1651.789511] env[62619]: WARNING nova.compute.manager [req-074470af-cb27-4e33-a4a0-410318ec8cbf req-b0bb166d-50a8-47e3-83f7-85590c9d885e service nova] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Received unexpected event network-vif-plugged-3a6e75b1-5f37-429c-894d-3c696d3a1b44 for instance with vm_state building and task_state spawning. 
[ 1651.852412] env[62619]: DEBUG nova.network.neutron [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Successfully updated port: 3a6e75b1-5f37-429c-894d-3c696d3a1b44 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1651.948771] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.428s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.949045] env[62619]: INFO nova.compute.manager [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Migrating [ 1651.956312] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.572s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.956596] env[62619]: DEBUG nova.objects.instance [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lazy-loading 'resources' on Instance uuid 0f925028-c376-438f-8a56-deaa23047199 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1652.122033] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.131174] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1777937, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506643} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.131426] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 1257a23e-3beb-4357-9322-4b84c87d0c35/1257a23e-3beb-4357-9322-4b84c87d0c35.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1652.131633] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1652.131872] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-633d2812-2b6f-42ec-b2e2-2c45f62c0e97 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.137999] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1652.137999] env[62619]: value = "task-1777938" [ 1652.137999] env[62619]: _type = "Task" [ 1652.137999] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.147098] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1777938, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.355070] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Acquiring lock "refresh_cache-d0258646-e687-4198-b7c8-7bd116e3bf18" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1652.355383] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Acquired lock "refresh_cache-d0258646-e687-4198-b7c8-7bd116e3bf18" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1652.355436] env[62619]: DEBUG nova.network.neutron [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1652.470438] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1652.470438] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1652.470438] env[62619]: DEBUG nova.network.neutron [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1652.647668] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1777938, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075033} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.650049] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1652.650994] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568dc01d-63ef-468c-8484-1b0c39a59f83 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.673668] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 1257a23e-3beb-4357-9322-4b84c87d0c35/1257a23e-3beb-4357-9322-4b84c87d0c35.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1652.675607] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63d8efda-0184-4e6f-8682-2f47727d5d39 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.694730] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1652.694730] env[62619]: value = "task-1777939" [ 1652.694730] env[62619]: _type = "Task" [ 1652.694730] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.703206] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1777939, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.824509] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbfcc3a-564b-490f-9314-249ce8384613 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.832864] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ffde73f-443a-44a3-a991-55f3fd2115bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.868227] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac534ac5-79f5-4c16-af79-957ac29f959f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.886807] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5c9967-b17e-465d-ad62-d344c45b38ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.913199] env[62619]: DEBUG nova.compute.provider_tree [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1652.925972] env[62619]: DEBUG nova.network.neutron [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1653.072727] env[62619]: DEBUG nova.network.neutron [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Updating instance_info_cache with network_info: [{"id": "3a6e75b1-5f37-429c-894d-3c696d3a1b44", "address": "fa:16:3e:ad:93:d2", "network": {"id": "607974ae-f450-474a-8021-fb8004da40fe", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-406913542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7e3c16dc9044a7ea891b555675de9ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a6e75b1-5f", "ovs_interfaceid": "3a6e75b1-5f37-429c-894d-3c696d3a1b44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1653.176783] env[62619]: DEBUG nova.network.neutron [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance_info_cache with network_info: [{"id": "7df13a55-7d53-41b9-9489-591516bda30c", "address": "fa:16:3e:aa:15:ba", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df13a55-7d", "ovs_interfaceid": "7df13a55-7d53-41b9-9489-591516bda30c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1653.203705] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1777939, 
'name': ReconfigVM_Task, 'duration_secs': 0.381713} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.203953] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 1257a23e-3beb-4357-9322-4b84c87d0c35/1257a23e-3beb-4357-9322-4b84c87d0c35.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1653.204552] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a07d9b4-f01a-4cda-908f-46b9067da3b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.211127] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1653.211127] env[62619]: value = "task-1777940" [ 1653.211127] env[62619]: _type = "Task" [ 1653.211127] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.218946] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1777940, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.422020] env[62619]: DEBUG nova.scheduler.client.report [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1653.575955] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Releasing lock "refresh_cache-d0258646-e687-4198-b7c8-7bd116e3bf18" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1653.575955] env[62619]: DEBUG nova.compute.manager [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Instance network_info: |[{"id": "3a6e75b1-5f37-429c-894d-3c696d3a1b44", "address": "fa:16:3e:ad:93:d2", "network": {"id": "607974ae-f450-474a-8021-fb8004da40fe", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-406913542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7e3c16dc9044a7ea891b555675de9ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a6e75b1-5f", "ovs_interfaceid": "3a6e75b1-5f37-429c-894d-3c696d3a1b44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1653.575955] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:93:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a407774d-9c2a-411d-9d6f-9ca733b97f3f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a6e75b1-5f37-429c-894d-3c696d3a1b44', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1653.583557] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Creating folder: Project (b7e3c16dc9044a7ea891b555675de9ce). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1653.584280] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f315c2e-dabf-45e1-ab11-08e108b0c722 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.596989] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Created folder: Project (b7e3c16dc9044a7ea891b555675de9ce) in parent group-v368875. [ 1653.597187] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Creating folder: Instances. Parent ref: group-v369074. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1653.597415] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f03098ae-41fb-4d71-af0e-09463bae808f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.606052] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Created folder: Instances in parent group-v369074. 
[ 1653.606300] env[62619]: DEBUG oslo.service.loopingcall [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1653.606486] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1653.606758] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d36372b1-72c8-42c4-a10c-952fabd3f0ad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.625076] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1653.625076] env[62619]: value = "task-1777943" [ 1653.625076] env[62619]: _type = "Task" [ 1653.625076] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.632181] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777943, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.680076] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1653.722318] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1777940, 'name': Rename_Task, 'duration_secs': 0.144245} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.722567] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1653.722811] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44964d97-bd25-480c-bd44-6aa2ef2b835e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.729725] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1653.729725] env[62619]: value = "task-1777944" [ 1653.729725] env[62619]: _type = "Task" [ 1653.729725] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.737289] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1777944, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.817261] env[62619]: DEBUG nova.compute.manager [req-a3a275b1-9d5f-4ac0-b88b-79911e3b9c62 req-19105d31-cadd-43db-ac2d-7a562363370d service nova] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Received event network-changed-3a6e75b1-5f37-429c-894d-3c696d3a1b44 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1653.817514] env[62619]: DEBUG nova.compute.manager [req-a3a275b1-9d5f-4ac0-b88b-79911e3b9c62 req-19105d31-cadd-43db-ac2d-7a562363370d service nova] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Refreshing instance network info cache due to event network-changed-3a6e75b1-5f37-429c-894d-3c696d3a1b44. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1653.817675] env[62619]: DEBUG oslo_concurrency.lockutils [req-a3a275b1-9d5f-4ac0-b88b-79911e3b9c62 req-19105d31-cadd-43db-ac2d-7a562363370d service nova] Acquiring lock "refresh_cache-d0258646-e687-4198-b7c8-7bd116e3bf18" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1653.817828] env[62619]: DEBUG oslo_concurrency.lockutils [req-a3a275b1-9d5f-4ac0-b88b-79911e3b9c62 req-19105d31-cadd-43db-ac2d-7a562363370d service nova] Acquired lock "refresh_cache-d0258646-e687-4198-b7c8-7bd116e3bf18" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1653.818025] env[62619]: DEBUG nova.network.neutron [req-a3a275b1-9d5f-4ac0-b88b-79911e3b9c62 req-19105d31-cadd-43db-ac2d-7a562363370d service nova] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Refreshing network info cache for port 3a6e75b1-5f37-429c-894d-3c696d3a1b44 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1653.924447] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.968s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.926815] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 34.435s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.947241] env[62619]: INFO nova.scheduler.client.report [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Deleted allocations for instance 0f925028-c376-438f-8a56-deaa23047199 [ 1654.134782] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777943, 'name': CreateVM_Task, 'duration_secs': 0.366359} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.134931] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1654.135509] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1654.135676] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1654.135992] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1654.136256] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe2418bc-f9cb-4ccd-9136-6972be0d43de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.140565] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Waiting for the task: (returnval){ [ 1654.140565] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ac18de-e478-19ce-3071-2c548eef6569" [ 1654.140565] env[62619]: _type = "Task" [ 1654.140565] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.147938] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ac18de-e478-19ce-3071-2c548eef6569, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.240109] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1777944, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.455774] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ecc8191-c037-4b1e-af45-caaa45feee52 tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "0f925028-c376-438f-8a56-deaa23047199" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.667s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1654.651376] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ac18de-e478-19ce-3071-2c548eef6569, 'name': SearchDatastore_Task, 'duration_secs': 0.017039} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.651542] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1654.651775] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1654.651996] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1654.652159] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1654.652332] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1654.652586] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be96a35d-f808-4d43-baea-263f578803ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.660876] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 
tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1654.661070] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1654.661862] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6b7e02c-d63f-4591-9c10-8e48649d65a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.667012] env[62619]: DEBUG nova.network.neutron [req-a3a275b1-9d5f-4ac0-b88b-79911e3b9c62 req-19105d31-cadd-43db-ac2d-7a562363370d service nova] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Updated VIF entry in instance network info cache for port 3a6e75b1-5f37-429c-894d-3c696d3a1b44. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1654.667012] env[62619]: DEBUG nova.network.neutron [req-a3a275b1-9d5f-4ac0-b88b-79911e3b9c62 req-19105d31-cadd-43db-ac2d-7a562363370d service nova] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Updating instance_info_cache with network_info: [{"id": "3a6e75b1-5f37-429c-894d-3c696d3a1b44", "address": "fa:16:3e:ad:93:d2", "network": {"id": "607974ae-f450-474a-8021-fb8004da40fe", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-406913542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7e3c16dc9044a7ea891b555675de9ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a6e75b1-5f", "ovs_interfaceid": "3a6e75b1-5f37-429c-894d-3c696d3a1b44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.669396] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Waiting for the task: (returnval){ [ 1654.669396] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ce62fc-c567-8b8f-2037-fd1c3a6981d8" [ 1654.669396] env[62619]: _type = "Task" [ 1654.669396] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.677663] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ce62fc-c567-8b8f-2037-fd1c3a6981d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.741396] env[62619]: DEBUG oslo_vmware.api [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1777944, 'name': PowerOnVM_Task, 'duration_secs': 0.712745} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.745033] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1654.745033] env[62619]: INFO nova.compute.manager [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Took 7.74 seconds to spawn the instance on the hypervisor. [ 1654.745033] env[62619]: DEBUG nova.compute.manager [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1654.745033] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6d13fe-428f-485d-ab92-4b2d9f094a76 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.941750] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Applying migration context for instance da806d3f-79f0-4188-a2d8-0beeb9dfec1a as it has an incoming, in-progress migration 7a58e51e-e1cb-4fe5-a12d-73e2a613ed67. Migration status is migrating {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1654.943693] env[62619]: INFO nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating resource usage from migration 7a58e51e-e1cb-4fe5-a12d-73e2a613ed67 [ 1654.943774] env[62619]: INFO nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating resource usage from migration 0025d712-72a0-433a-9c2a-ce2a5e846a5e [ 1654.969049] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance ac03bcf3-61df-4557-8018-0ad54ef30f17 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.969242] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4763e489-5aeb-4dc0-b327-b79a55afdfe3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1654.969367] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.969484] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance aa4906f1-e801-4df0-819e-8c5fb5930fb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.969609] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance cef20063-96f0-46cc-9f7d-4436b60216c6 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1654.969725] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e32cb991-a018-4b55-8cdf-378e212c8434 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.969849] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance eca829be-d425-4668-9ebd-1247c5ff19d0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1654.969964] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance b1c3c213-599d-4cab-8224-d87467d774c9 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.970099] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.970233] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1654.970346] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance a6ba8114-0261-4894-98c0-9e0360f6d256 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.970466] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance a5e4c524-7cc8-4981-899e-1a7c80fac2bd is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1654.970576] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 5cf7ca57-351f-48ab-8758-b30f50cd607f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.970696] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 769905db-d19a-411f-bb5d-8196056b82aa is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1654.970806] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.970923] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance fdde42eb-766c-4549-aae5-f7b1a1097cc6 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1654.971043] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 1257a23e-3beb-4357-9322-4b84c87d0c35 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.971160] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance d0258646-e687-4198-b7c8-7bd116e3bf18 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.971269] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Migration 7a58e51e-e1cb-4fe5-a12d-73e2a613ed67 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1654.971377] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance da806d3f-79f0-4188-a2d8-0beeb9dfec1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1655.170727] env[62619]: DEBUG oslo_concurrency.lockutils [req-a3a275b1-9d5f-4ac0-b88b-79911e3b9c62 req-19105d31-cadd-43db-ac2d-7a562363370d service nova] Releasing lock "refresh_cache-d0258646-e687-4198-b7c8-7bd116e3bf18" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1655.180321] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ce62fc-c567-8b8f-2037-fd1c3a6981d8, 'name': SearchDatastore_Task, 'duration_secs': 0.009116} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.181039] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fc7a2aa-f2d8-4e4c-b823-1640e8990f47 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.186182] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Waiting for the task: (returnval){ [ 1655.186182] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52999103-f723-7797-75c1-0ae5acc115b0" [ 1655.186182] env[62619]: _type = "Task" [ 1655.186182] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.194849] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52999103-f723-7797-75c1-0ae5acc115b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.196358] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08cc0749-2779-47b3-a9ac-b2763ded7e00 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.212965] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance 'da806d3f-79f0-4188-a2d8-0beeb9dfec1a' progress to 0 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1655.259740] env[62619]: INFO nova.compute.manager [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Took 44.16 seconds to build instance. [ 1655.474696] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 20d62152-3859-4023-a11d-b17c76e1090a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1655.697741] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52999103-f723-7797-75c1-0ae5acc115b0, 'name': SearchDatastore_Task, 'duration_secs': 0.009423} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.697958] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1655.698237] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] d0258646-e687-4198-b7c8-7bd116e3bf18/d0258646-e687-4198-b7c8-7bd116e3bf18.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1655.698491] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e0631cf-5998-4a4f-aca7-d9100e425f00 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.705249] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Waiting for the task: (returnval){ [ 1655.705249] env[62619]: value = "task-1777945" [ 1655.705249] env[62619]: _type = "Task" [ 1655.705249] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.712814] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777945, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.718531] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1655.718763] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d7b0d38-850a-4582-8d93-da74778ef8c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.724564] env[62619]: DEBUG oslo_vmware.api [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1655.724564] env[62619]: value = "task-1777946" [ 1655.724564] env[62619]: _type = "Task" [ 1655.724564] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.732171] env[62619]: DEBUG oslo_vmware.api [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777946, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.761269] env[62619]: DEBUG oslo_concurrency.lockutils [None req-508c6e54-c665-42a0-bc4f-f0bcab84ffc2 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "1257a23e-3beb-4357-9322-4b84c87d0c35" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.665s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.978275] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 11869077-b428-413f-9f8f-7eac08d2d9ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1656.215654] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777945, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.234571] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1656.235531] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance 'da806d3f-79f0-4188-a2d8-0beeb9dfec1a' progress to 17 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1656.482100] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance cb33580e-d70d-4557-98fe-e673d93f3307 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1656.482100] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Migration 0025d712-72a0-433a-9c2a-ce2a5e846a5e is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1656.613818] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "1257a23e-3beb-4357-9322-4b84c87d0c35" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.614109] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "1257a23e-3beb-4357-9322-4b84c87d0c35" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.614322] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "1257a23e-3beb-4357-9322-4b84c87d0c35-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.614503] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "1257a23e-3beb-4357-9322-4b84c87d0c35-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.614671] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "1257a23e-3beb-4357-9322-4b84c87d0c35-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.617219] env[62619]: INFO nova.compute.manager [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Terminating instance [ 1656.715532] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777945, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.586457} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.715769] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] d0258646-e687-4198-b7c8-7bd116e3bf18/d0258646-e687-4198-b7c8-7bd116e3bf18.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1656.715972] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1656.716224] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9e1fd6a3-dcaf-4d91-a174-f626c4a9bfb0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.722854] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Waiting for the task: (returnval){ [ 1656.722854] env[62619]: value = "task-1777947" [ 1656.722854] env[62619]: _type = "Task" [ 1656.722854] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.730047] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777947, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.741243] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1656.741454] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1656.741625] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1656.741824] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1656.741991] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1656.742172] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1656.742390] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1656.742552] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1656.742742] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] 
Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1656.742983] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1656.743233] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1656.748639] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf394160-bae4-4802-9250-acbe253fd52a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.764976] env[62619]: DEBUG oslo_vmware.api [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1656.764976] env[62619]: value = "task-1777948" [ 1656.764976] env[62619]: _type = "Task" [ 1656.764976] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.774110] env[62619]: DEBUG oslo_vmware.api [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777948, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.985965] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 8c296f2c-3e47-4431-b0c0-f7f1706c4a12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1656.986215] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance cbff225f-2d11-4a43-a320-95dd3afb8e48 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1657.120929] env[62619]: DEBUG nova.compute.manager [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1657.121187] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1657.122125] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379fb169-c9fb-4ca3-8bcf-78113d9a6df7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.130430] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1657.130678] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68a09f4f-7276-43f2-a098-0d95431f18b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.136977] env[62619]: DEBUG oslo_vmware.api [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1657.136977] env[62619]: value = "task-1777949" [ 1657.136977] env[62619]: _type = "Task" [ 1657.136977] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.145551] env[62619]: DEBUG oslo_vmware.api [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1777949, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.232901] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777947, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06083} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.233256] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1657.234048] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e9c6f2-a734-4816-bac5-1204b3eb1f47 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.255959] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] d0258646-e687-4198-b7c8-7bd116e3bf18/d0258646-e687-4198-b7c8-7bd116e3bf18.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1657.256607] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63938153-a09f-43da-b63a-ffe9c42a3f6a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.280070] env[62619]: DEBUG oslo_vmware.api [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777948, 'name': ReconfigVM_Task, 'duration_secs': 0.243946} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.281335] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance 'da806d3f-79f0-4188-a2d8-0beeb9dfec1a' progress to 33 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1657.284675] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Waiting for the task: (returnval){ [ 1657.284675] env[62619]: value = "task-1777950" [ 1657.284675] env[62619]: _type = "Task" [ 1657.284675] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.292720] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777950, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.489591] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance b3d9c418-f521-4770-a381-5238be6cc33c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1657.647049] env[62619]: DEBUG oslo_vmware.api [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1777949, 'name': PowerOffVM_Task, 'duration_secs': 0.421706} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.647363] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1657.647590] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1657.647852] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95b91962-7aae-4b7e-9f46-ec1db464bdec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.765585] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1657.765803] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1657.765991] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleting the datastore file [datastore1] 1257a23e-3beb-4357-9322-4b84c87d0c35 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1657.766313] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4620e07-ccdb-427d-b246-f83439db59ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.772568] env[62619]: DEBUG oslo_vmware.api [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1657.772568] env[62619]: value = "task-1777952" [ 1657.772568] env[62619]: _type = "Task" [ 1657.772568] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.780475] env[62619]: DEBUG oslo_vmware.api [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1777952, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.790489] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1657.790704] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1657.790881] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1657.791091] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1657.791242] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1657.791388] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1657.791589] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1657.791745] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 
tempest-ServerActionsTestOtherB-723624610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1657.791904] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1657.792080] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1657.792272] env[62619]: DEBUG nova.virt.hardware [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1657.797518] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Reconfiguring VM instance instance-00000027 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1657.797767] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94e1ab5c-c85e-4d60-8973-11774c98939d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.816409] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777950, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.817586] env[62619]: DEBUG oslo_vmware.api [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1657.817586] env[62619]: value = "task-1777953" [ 1657.817586] env[62619]: _type = "Task" [ 1657.817586] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.826150] env[62619]: DEBUG oslo_vmware.api [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777953, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.993102] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4983b333-debb-4a2b-b28d-b321f0d8d7d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1657.993102] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1657.993372] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3328MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1658.284348] env[62619]: DEBUG oslo_vmware.api [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1777952, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.409042} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.284597] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1658.284767] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1658.284935] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1658.285166] env[62619]: INFO nova.compute.manager [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1658.285408] env[62619]: DEBUG oslo.service.loopingcall [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1658.285591] env[62619]: DEBUG nova.compute.manager [-] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1658.285683] env[62619]: DEBUG nova.network.neutron [-] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1658.296819] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777950, 'name': ReconfigVM_Task, 'duration_secs': 0.781085} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.297080] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Reconfigured VM instance instance-00000049 to attach disk [datastore1] d0258646-e687-4198-b7c8-7bd116e3bf18/d0258646-e687-4198-b7c8-7bd116e3bf18.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1658.297673] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2529fcb4-4bd8-4650-ba5e-becdc72dad78 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.305750] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Waiting for the task: (returnval){ [ 1658.305750] env[62619]: value = "task-1777954" [ 1658.305750] env[62619]: _type = "Task" [ 1658.305750] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.312958] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777954, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.328358] env[62619]: DEBUG oslo_vmware.api [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777953, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.343761] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1ca729-8d62-468e-aa61-c7abc9ed0a07 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.350599] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3bdb3b-3540-47f2-89fb-9029b3d460a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.381553] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c391db-9589-4dd7-8c01-4b158add9b3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.389223] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1722d5e5-024e-4ddd-b082-d69244232285 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.405365] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1658.621123] env[62619]: DEBUG nova.compute.manager [req-582ccdb0-640e-4845-a8d3-bdeafdceca8d req-27d275fe-03c1-4016-94ee-73a7ff9c4201 service nova] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Received event network-vif-deleted-8ba2df09-8b89-4f62-a33e-49835e08ced1 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1658.621362] env[62619]: INFO nova.compute.manager [req-582ccdb0-640e-4845-a8d3-bdeafdceca8d req-27d275fe-03c1-4016-94ee-73a7ff9c4201 service nova] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Neutron deleted interface 8ba2df09-8b89-4f62-a33e-49835e08ced1; detaching it from the instance and deleting it from the info cache [ 1658.621503] env[62619]: DEBUG nova.network.neutron [req-582ccdb0-640e-4845-a8d3-bdeafdceca8d req-27d275fe-03c1-4016-94ee-73a7ff9c4201 service nova] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1658.815069] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777954, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.828137] env[62619]: DEBUG oslo_vmware.api [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777953, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.906994] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1659.100676] env[62619]: DEBUG nova.network.neutron [-] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1659.123414] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a226ef62-0e4a-43e3-af19-2a800484a088 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.133249] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103d0c25-42cb-42f6-b1f2-3fb580038883 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.164364] env[62619]: DEBUG nova.compute.manager [req-582ccdb0-640e-4845-a8d3-bdeafdceca8d req-27d275fe-03c1-4016-94ee-73a7ff9c4201 service nova] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Detach interface failed, port_id=8ba2df09-8b89-4f62-a33e-49835e08ced1, reason: Instance 1257a23e-3beb-4357-9322-4b84c87d0c35 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1659.315558] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777954, 'name': Rename_Task, 'duration_secs': 0.81914} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.315842] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1659.316111] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25dd8793-ba8b-4776-b041-48b8130e544a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.324976] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Waiting for the task: (returnval){ [ 1659.324976] env[62619]: value = "task-1777955" [ 1659.324976] env[62619]: _type = "Task" [ 1659.324976] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.333020] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777955, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.336121] env[62619]: DEBUG oslo_vmware.api [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777953, 'name': ReconfigVM_Task, 'duration_secs': 1.160438} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.336399] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Reconfigured VM instance instance-00000027 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1659.337211] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0cf7e6b-d1f6-473a-bb2e-1276af197229 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.359200] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] da806d3f-79f0-4188-a2d8-0beeb9dfec1a/da806d3f-79f0-4188-a2d8-0beeb9dfec1a.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1659.359483] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86d980bb-8397-4f03-84dd-3fbf76d5976f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.377903] env[62619]: DEBUG oslo_vmware.api [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1659.377903] env[62619]: value = "task-1777956" [ 1659.377903] env[62619]: _type = "Task" [ 1659.377903] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.385692] env[62619]: DEBUG oslo_vmware.api [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777956, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.412020] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1659.412020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.485s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.412020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.283s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.412285] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.414217] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.969s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.415618] env[62619]: INFO nova.compute.claims [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1659.442025] env[62619]: INFO nova.scheduler.client.report [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Deleted allocations for instance a5e4c524-7cc8-4981-899e-1a7c80fac2bd [ 1659.603545] env[62619]: INFO nova.compute.manager [-] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Took 1.32 seconds to deallocate network for instance. [ 1659.835982] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777955, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.886458] env[62619]: DEBUG oslo_vmware.api [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777956, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.949988] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b222a348-7331-4d3a-a263-f4e7b124cd5f tempest-MultipleCreateTestJSON-1832519573 tempest-MultipleCreateTestJSON-1832519573-project-member] Lock "a5e4c524-7cc8-4981-899e-1a7c80fac2bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.352s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1660.110615] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.335583] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777955, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.387834] env[62619]: DEBUG oslo_vmware.api [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777956, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.788462] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde2d78f-dafa-408e-bfa2-9d3d9cc77e79 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.796324] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25f8420-3151-4afc-9430-7f2c1a52f56f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.834478] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a63f526-7363-4e49-a050-e4d72b488a28 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.846104] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fadaaedf-ff9a-4a89-96c6-93c2b8a99a85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.851589] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777955, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.860305] env[62619]: DEBUG nova.compute.provider_tree [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1660.888238] env[62619]: DEBUG oslo_vmware.api [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1777956, 'name': ReconfigVM_Task, 'duration_secs': 1.027165} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.888519] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Reconfigured VM instance instance-00000027 to attach disk [datastore1] da806d3f-79f0-4188-a2d8-0beeb9dfec1a/da806d3f-79f0-4188-a2d8-0beeb9dfec1a.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1660.888779] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance 'da806d3f-79f0-4188-a2d8-0beeb9dfec1a' progress to 50 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1661.340286] env[62619]: DEBUG oslo_vmware.api [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777955, 'name': PowerOnVM_Task, 'duration_secs': 1.558898} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.340557] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1661.340787] env[62619]: INFO nova.compute.manager [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Took 9.80 seconds to spawn the instance on the hypervisor. 
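The entries above and below repeat the same wait/poll pattern for long-running vCenter operations: the driver logs 'Waiting for the task: (returnval){ value = "task-NNNNNNN" ... } to complete.' (wait_for_task), then periodic 'progress is N%' lines (_poll_task), and finally a 'completed successfully' line carrying duration_secs. The sketch below is a minimal, hypothetical illustration of that poll-until-done loop only; it is not the oslo.vmware implementation, and the get_task_info callable, the TaskFailed exception, and the field names are assumptions introduced purely for the example.

```python
import time


class TaskFailed(Exception):
    """Raised when the backend reports the task in an error state (assumed name)."""


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a long-running task until it succeeds, fails, or times out.

    get_task_info: assumed callable returning an object with .state
    ('queued', 'running', 'success', 'error'), .progress (0-100) and
    .error (an error message). This mirrors the wait_for_task ->
    _poll_task -> 'completed successfully' sequence seen in the log,
    but is not the real oslo.vmware code.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            # Corresponds to the "... completed successfully" log line.
            return info
        if info.state == "error":
            raise TaskFailed(info.error)
        # Corresponds to the periodic "progress is N%" DEBUG lines.
        print(f"task progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete within %.0fs" % timeout)
```

Under those assumptions, each ReconfigVM_Task, PowerOffVM_Task or PowerOnVM_Task wait in the trace corresponds to one pass through such a loop, and the duration_secs value reported on completion is roughly the wall-clock time spent polling that task.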
[ 1661.340933] env[62619]: DEBUG nova.compute.manager [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1661.341699] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2e7471-b97d-4e67-82ef-c7db68103b39 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.365087] env[62619]: DEBUG nova.scheduler.client.report [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1661.396188] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74deeb5b-131b-41af-b6d0-1c6540075e16 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.414838] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8361c669-0b37-4c2f-9ac7-5ec9cb4a3a84 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.432821] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance 'da806d3f-79f0-4188-a2d8-0beeb9dfec1a' progress to 67 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1661.859795] env[62619]: INFO nova.compute.manager [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Took 46.95 seconds to build instance. [ 1661.868117] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.868379] env[62619]: DEBUG nova.compute.manager [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1661.870799] env[62619]: DEBUG oslo_concurrency.lockutils [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.065s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.871073] env[62619]: DEBUG oslo_concurrency.lockutils [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.873152] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.503s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.875436] env[62619]: INFO nova.compute.claims [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1661.908219] env[62619]: INFO nova.scheduler.client.report [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Deleted allocations for instance 769905db-d19a-411f-bb5d-8196056b82aa [ 1662.002116] env[62619]: DEBUG nova.network.neutron [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Port 7df13a55-7d53-41b9-9489-591516bda30c binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1662.362541] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7a9fb0a0-2607-4cf7-813d-e7f7e77b3e57 tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Lock "d0258646-e687-4198-b7c8-7bd116e3bf18" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.470s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.381423] env[62619]: DEBUG nova.compute.utils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1662.382787] env[62619]: DEBUG nova.compute.manager [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1662.383125] env[62619]: DEBUG nova.network.neutron [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1662.415795] env[62619]: DEBUG oslo_concurrency.lockutils [None req-46253298-c45d-489e-88d2-caed1726f87b tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "769905db-d19a-411f-bb5d-8196056b82aa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.151s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.487665] env[62619]: DEBUG nova.policy [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99fe16c5bb994c07ad632a2dbc6e0117', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b4732aa2aa21480ea16ad44afb31ef68', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1662.525802] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Acquiring lock "d0258646-e687-4198-b7c8-7bd116e3bf18" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1662.525802] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Lock "d0258646-e687-4198-b7c8-7bd116e3bf18" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1662.525802] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Acquiring lock "d0258646-e687-4198-b7c8-7bd116e3bf18-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1662.525802] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Lock "d0258646-e687-4198-b7c8-7bd116e3bf18-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1662.525802] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Lock "d0258646-e687-4198-b7c8-7bd116e3bf18-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.527145] env[62619]: INFO nova.compute.manager [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Terminating instance [ 1662.887202] env[62619]: DEBUG nova.compute.manager [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1663.019449] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.019554] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.019683] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.033028] env[62619]: DEBUG nova.compute.manager [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1663.033455] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1663.034678] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7df83d-deda-459a-a701-26e728828e1d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.044139] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1663.044480] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33c1921e-1635-40ea-bb30-6b9216d8d506 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.053947] env[62619]: DEBUG oslo_vmware.api [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Waiting for the task: (returnval){ [ 1663.053947] env[62619]: value = "task-1777957" [ 1663.053947] env[62619]: _type = "Task" [ 1663.053947] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.062379] env[62619]: DEBUG oslo_vmware.api [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777957, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.070052] env[62619]: DEBUG nova.network.neutron [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Successfully created port: e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1663.280813] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f88cf8d-ea16-40b1-8518-d712cc71f148 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.293313] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7852dd2-4b68-4bd8-ae16-95ddbf5b2a0b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.329652] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db8e2772-adce-4bc7-820d-0838625425ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.337386] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c95dbd-e842-4a7b-b9c2-53c269acb300 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.352633] env[62619]: DEBUG nova.compute.provider_tree [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1663.568418] env[62619]: DEBUG oslo_vmware.api [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777957, 'name': PowerOffVM_Task, 'duration_secs': 0.316451} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.568643] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1663.568813] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1663.569195] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3a50c06-8447-44ec-8900-ce4c153b2efd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.650611] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1663.650869] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1663.651080] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Deleting the datastore file [datastore1] d0258646-e687-4198-b7c8-7bd116e3bf18 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1663.651350] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc7a700d-77cc-41d0-bf78-4a4b973b9e13 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.659068] env[62619]: DEBUG oslo_vmware.api [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Waiting for the task: (returnval){ [ 1663.659068] env[62619]: value = "task-1777959" [ 1663.659068] env[62619]: _type = "Task" [ 1663.659068] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.668907] env[62619]: DEBUG oslo_vmware.api [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777959, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.856119] env[62619]: DEBUG nova.scheduler.client.report [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1663.896225] env[62619]: DEBUG nova.compute.manager [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1663.922413] env[62619]: DEBUG nova.virt.hardware [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1663.922658] env[62619]: DEBUG nova.virt.hardware [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1663.922808] env[62619]: DEBUG nova.virt.hardware [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1663.922984] env[62619]: DEBUG nova.virt.hardware [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1663.923201] env[62619]: DEBUG nova.virt.hardware [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Image pref 0:0:0 {{(pid=62619) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1663.923367] env[62619]: DEBUG nova.virt.hardware [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1663.923573] env[62619]: DEBUG nova.virt.hardware [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1663.923726] env[62619]: DEBUG nova.virt.hardware [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1663.923887] env[62619]: DEBUG nova.virt.hardware [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1663.924060] env[62619]: DEBUG nova.virt.hardware [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1663.924237] env[62619]: DEBUG nova.virt.hardware [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1663.925101] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ce88f1-c3c0-432b-9f64-a93c98d8db6f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.932731] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49788dd0-c2bd-484f-a5a8-5813bcbabc60 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.060055] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1664.060315] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1664.060554] env[62619]: DEBUG nova.network.neutron [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1664.168562] env[62619]: DEBUG oslo_vmware.api [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Task: {'id': task-1777959, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152853} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.168850] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1664.169426] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1664.169631] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1664.169808] env[62619]: INFO nova.compute.manager [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1664.170061] env[62619]: DEBUG oslo.service.loopingcall [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1664.170253] env[62619]: DEBUG nova.compute.manager [-] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1664.170347] env[62619]: DEBUG nova.network.neutron [-] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1664.362647] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.489s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.363208] env[62619]: DEBUG nova.compute.manager [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1664.366322] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.167s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.369130] env[62619]: INFO nova.compute.claims [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1664.428701] env[62619]: DEBUG nova.compute.manager [req-5a289f4c-0c87-4ab5-ba05-1073e3f08f88 req-9663aa26-4e75-4de2-a2b8-ccfd1768fe3e service nova] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Received event network-vif-deleted-3a6e75b1-5f37-429c-894d-3c696d3a1b44 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1664.428701] env[62619]: INFO nova.compute.manager [req-5a289f4c-0c87-4ab5-ba05-1073e3f08f88 req-9663aa26-4e75-4de2-a2b8-ccfd1768fe3e service nova] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Neutron deleted interface 3a6e75b1-5f37-429c-894d-3c696d3a1b44; detaching it from the instance and deleting it from the info cache [ 1664.428701] env[62619]: DEBUG nova.network.neutron [req-5a289f4c-0c87-4ab5-ba05-1073e3f08f88 req-9663aa26-4e75-4de2-a2b8-ccfd1768fe3e service nova] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.620787] env[62619]: DEBUG nova.compute.manager [req-bb7e6d0e-87ac-4004-bc4d-2455859a9e53 req-33b1a561-90c0-4b7e-a524-461bb4dd4e3e service nova] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Received event network-vif-plugged-e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1664.621097] 
env[62619]: DEBUG oslo_concurrency.lockutils [req-bb7e6d0e-87ac-4004-bc4d-2455859a9e53 req-33b1a561-90c0-4b7e-a524-461bb4dd4e3e service nova] Acquiring lock "20d62152-3859-4023-a11d-b17c76e1090a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.621333] env[62619]: DEBUG oslo_concurrency.lockutils [req-bb7e6d0e-87ac-4004-bc4d-2455859a9e53 req-33b1a561-90c0-4b7e-a524-461bb4dd4e3e service nova] Lock "20d62152-3859-4023-a11d-b17c76e1090a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.621614] env[62619]: DEBUG oslo_concurrency.lockutils [req-bb7e6d0e-87ac-4004-bc4d-2455859a9e53 req-33b1a561-90c0-4b7e-a524-461bb4dd4e3e service nova] Lock "20d62152-3859-4023-a11d-b17c76e1090a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.621832] env[62619]: DEBUG nova.compute.manager [req-bb7e6d0e-87ac-4004-bc4d-2455859a9e53 req-33b1a561-90c0-4b7e-a524-461bb4dd4e3e service nova] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] No waiting events found dispatching network-vif-plugged-e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1664.622012] env[62619]: WARNING nova.compute.manager [req-bb7e6d0e-87ac-4004-bc4d-2455859a9e53 req-33b1a561-90c0-4b7e-a524-461bb4dd4e3e service nova] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Received unexpected event network-vif-plugged-e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20 for instance with vm_state building and task_state spawning. 
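Annotation (not part of the captured log): the "Acquiring lock ... / acquired ... waited 0.000s / released ... held 0.000s" triplet above for "20d62152-3859-4023-a11d-b17c76e1090a-events" is produced by oslo_concurrency's named locks guarding the per-instance external-event table. The sketch below is illustrative only and is not Nova's _pop_event implementation; the events dict and the function name are assumptions, while the "<uuid>-events" lock-name pattern mirrors the log.

    from oslo_concurrency import lockutils

    def pop_instance_event(events, instance_uuid, event_name):
        # Serialize access to the pending-event table for this instance,
        # using the same "<uuid>-events" lock name seen in the log above.
        with lockutils.lock('%s-events' % instance_uuid):
            # Return and remove the waiter registered for this event, if any.
            # When nothing is waiting, the caller logs the
            # "Received unexpected event ..." WARNING shown above.
            return events.get(instance_uuid, {}).pop(event_name, None)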
[ 1664.731034] env[62619]: DEBUG nova.network.neutron [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Successfully updated port: e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1664.793965] env[62619]: DEBUG nova.network.neutron [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance_info_cache with network_info: [{"id": "7df13a55-7d53-41b9-9489-591516bda30c", "address": "fa:16:3e:aa:15:ba", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df13a55-7d", "ovs_interfaceid": "7df13a55-7d53-41b9-9489-591516bda30c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.875726] env[62619]: DEBUG nova.compute.utils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1664.877158] env[62619]: DEBUG nova.compute.manager [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1664.877331] env[62619]: DEBUG nova.network.neutron [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1664.908310] env[62619]: DEBUG nova.network.neutron [-] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.931455] env[62619]: DEBUG nova.policy [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c91b100cc8f94b93af086dafebe29092', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c24c9d49d8d4104a0868f126eb3a26e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1664.933977] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bcd6a629-3bba-4b2b-b7e8-0ce3ba8a152a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.947289] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb726055-7d69-4a3c-9a70-da29e8527bab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.985507] env[62619]: DEBUG nova.compute.manager [req-5a289f4c-0c87-4ab5-ba05-1073e3f08f88 req-9663aa26-4e75-4de2-a2b8-ccfd1768fe3e service nova] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Detach interface failed, port_id=3a6e75b1-5f37-429c-894d-3c696d3a1b44, reason: Instance d0258646-e687-4198-b7c8-7bd116e3bf18 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1665.226692] env[62619]: DEBUG nova.network.neutron [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Successfully created port: 6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1665.236385] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Acquiring lock "refresh_cache-20d62152-3859-4023-a11d-b17c76e1090a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.236536] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Acquired lock "refresh_cache-20d62152-3859-4023-a11d-b17c76e1090a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1665.236681] env[62619]: DEBUG nova.network.neutron [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1665.298161] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1665.382501] env[62619]: DEBUG nova.compute.manager [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1665.410514] env[62619]: INFO nova.compute.manager [-] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Took 1.24 seconds to deallocate network for instance. 
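Annotation (not part of the captured log): the earlier "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" line comes from oslo_service.loopingcall's retry machinery, which re-invokes a wrapped callable when it raises a listed exception. A minimal sketch of that pattern follows; the retry counts, sleep times, and the TransientNeutronError exception are illustrative assumptions, not the values Nova actually uses.

    from oslo_service import loopingcall

    class TransientNeutronError(Exception):
        """Hypothetical stand-in for the retriable Neutron failures."""

    # RetryDecorator re-invokes the wrapped function with an increasing sleep
    # whenever it raises one of the listed exceptions, up to max_retry_count.
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=10,
                                exceptions=(TransientNeutronError,))
    def deallocate_network_with_retries():
        # Placeholder body: the real code asks Neutron to unbind and delete
        # the instance's ports, retrying on transient failures.
        pass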
[ 1665.665682] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1a0e7d-9558-42ed-84d9-c781aa67856a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.673224] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40fdd98f-9e47-462e-b21b-facc475ca88e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.701952] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2ed145-b1bc-47d4-b107-3b713112bbcf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.709340] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51684e34-387e-4236-bcd4-6d479b1c21d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.722201] env[62619]: DEBUG nova.compute.provider_tree [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1665.765696] env[62619]: DEBUG nova.network.neutron [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1665.819965] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470e02d7-95ab-4710-9f69-d5a3b02786ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.842152] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f985d399-2675-4afa-bdec-0e140ed111e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.850086] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance 'da806d3f-79f0-4188-a2d8-0beeb9dfec1a' progress to 83 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1665.916638] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.997700] env[62619]: DEBUG nova.network.neutron [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Updating instance_info_cache with network_info: [{"id": "e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20", "address": "fa:16:3e:8c:9f:34", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.124", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6097edc-d3", "ovs_interfaceid": "e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.225533] env[62619]: DEBUG nova.scheduler.client.report [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1666.356401] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-96b041fd-3629-4997-8b69-284b7353a33b tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance 'da806d3f-79f0-4188-a2d8-0beeb9dfec1a' progress to 100 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1666.392694] env[62619]: DEBUG nova.compute.manager [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1666.417338] env[62619]: DEBUG nova.virt.hardware [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1666.417604] env[62619]: DEBUG nova.virt.hardware [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1666.417760] env[62619]: DEBUG nova.virt.hardware [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1666.417937] env[62619]: DEBUG nova.virt.hardware [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1666.418096] env[62619]: DEBUG nova.virt.hardware [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1666.418247] env[62619]: DEBUG nova.virt.hardware [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 
tempest-ServerActionsTestOtherA-2061775145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1666.418452] env[62619]: DEBUG nova.virt.hardware [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1666.418608] env[62619]: DEBUG nova.virt.hardware [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1666.418787] env[62619]: DEBUG nova.virt.hardware [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1666.418948] env[62619]: DEBUG nova.virt.hardware [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1666.419131] env[62619]: DEBUG nova.virt.hardware [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1666.420251] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9593c0c-236b-4bc8-b2b7-2a48aa9c5ffb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.429041] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dad5932-d51f-49ef-9a08-bf6e00319798 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.500290] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Releasing lock "refresh_cache-20d62152-3859-4023-a11d-b17c76e1090a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1666.500450] env[62619]: DEBUG nova.compute.manager [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Instance network_info: |[{"id": "e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20", "address": "fa:16:3e:8c:9f:34", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": 
{}}, "ips": [{"address": "192.168.233.124", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6097edc-d3", "ovs_interfaceid": "e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1666.501264] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:9f:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1666.510719] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Creating folder: Project (b4732aa2aa21480ea16ad44afb31ef68). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1666.510719] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae419882-4bd3-4b36-b09f-450f19214f80 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.522097] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Created folder: Project (b4732aa2aa21480ea16ad44afb31ef68) in parent group-v368875. [ 1666.522293] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Creating folder: Instances. Parent ref: group-v369077. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1666.522515] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e1a444c-fc9a-4afb-9830-f6b21059fb27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.531042] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Created folder: Instances in parent group-v369077. 
[ 1666.531042] env[62619]: DEBUG oslo.service.loopingcall [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1666.531310] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1666.531310] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ef7ce16-6dc3-4bdd-acc1-ebcf01c6adb6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.550280] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1666.550280] env[62619]: value = "task-1777962" [ 1666.550280] env[62619]: _type = "Task" [ 1666.550280] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.557669] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777962, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.609304] env[62619]: DEBUG nova.compute.manager [req-6c9e0f8f-9798-4926-b131-a7c002a0dc9f req-c760a94f-1d60-4f73-9d1f-3a7a32822825 service nova] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Received event network-vif-plugged-6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1666.609520] env[62619]: DEBUG oslo_concurrency.lockutils [req-6c9e0f8f-9798-4926-b131-a7c002a0dc9f req-c760a94f-1d60-4f73-9d1f-3a7a32822825 service nova] Acquiring lock "11869077-b428-413f-9f8f-7eac08d2d9ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.609724] env[62619]: DEBUG oslo_concurrency.lockutils [req-6c9e0f8f-9798-4926-b131-a7c002a0dc9f req-c760a94f-1d60-4f73-9d1f-3a7a32822825 service nova] Lock "11869077-b428-413f-9f8f-7eac08d2d9ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.609876] env[62619]: DEBUG oslo_concurrency.lockutils [req-6c9e0f8f-9798-4926-b131-a7c002a0dc9f req-c760a94f-1d60-4f73-9d1f-3a7a32822825 service nova] Lock "11869077-b428-413f-9f8f-7eac08d2d9ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.610048] env[62619]: DEBUG nova.compute.manager [req-6c9e0f8f-9798-4926-b131-a7c002a0dc9f req-c760a94f-1d60-4f73-9d1f-3a7a32822825 service nova] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] No waiting events found dispatching network-vif-plugged-6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1666.610215] env[62619]: WARNING nova.compute.manager [req-6c9e0f8f-9798-4926-b131-a7c002a0dc9f req-c760a94f-1d60-4f73-9d1f-3a7a32822825 service nova] [instance: 
11869077-b428-413f-9f8f-7eac08d2d9ec] Received unexpected event network-vif-plugged-6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81 for instance with vm_state building and task_state spawning. [ 1666.646233] env[62619]: DEBUG nova.compute.manager [req-1cd20688-cebb-47bd-b12e-d1539abc81cb req-71b335d8-3fcc-4d04-b2a1-528dfa43d234 service nova] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Received event network-changed-e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1666.646415] env[62619]: DEBUG nova.compute.manager [req-1cd20688-cebb-47bd-b12e-d1539abc81cb req-71b335d8-3fcc-4d04-b2a1-528dfa43d234 service nova] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Refreshing instance network info cache due to event network-changed-e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1666.646618] env[62619]: DEBUG oslo_concurrency.lockutils [req-1cd20688-cebb-47bd-b12e-d1539abc81cb req-71b335d8-3fcc-4d04-b2a1-528dfa43d234 service nova] Acquiring lock "refresh_cache-20d62152-3859-4023-a11d-b17c76e1090a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1666.646755] env[62619]: DEBUG oslo_concurrency.lockutils [req-1cd20688-cebb-47bd-b12e-d1539abc81cb req-71b335d8-3fcc-4d04-b2a1-528dfa43d234 service nova] Acquired lock "refresh_cache-20d62152-3859-4023-a11d-b17c76e1090a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1666.646912] env[62619]: DEBUG nova.network.neutron [req-1cd20688-cebb-47bd-b12e-d1539abc81cb req-71b335d8-3fcc-4d04-b2a1-528dfa43d234 service nova] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Refreshing network info cache for port e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1666.705551] env[62619]: DEBUG nova.network.neutron [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Successfully updated port: 6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1666.730479] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.364s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.731086] env[62619]: DEBUG nova.compute.manager [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1666.733956] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.355s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.733956] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.735677] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.809s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.735861] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.737542] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 22.933s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.765025] env[62619]: INFO nova.scheduler.client.report [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Deleted allocations for instance dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5 [ 1666.767831] env[62619]: INFO nova.scheduler.client.report [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Deleted allocations for instance cef20063-96f0-46cc-9f7d-4436b60216c6 [ 1667.060533] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777962, 'name': CreateVM_Task, 'duration_secs': 0.429903} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.060533] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1667.061123] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.061412] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.061667] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1667.061933] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-708360f5-7b32-4bcd-830e-9f7b80d93848 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.066702] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Waiting for the task: (returnval){ [ 1667.066702] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5225b8eb-83f9-ecb1-dbdd-a48c5466a6bd" [ 1667.066702] env[62619]: _type = "Task" [ 1667.066702] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.074135] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5225b8eb-83f9-ecb1-dbdd-a48c5466a6bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.208576] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "refresh_cache-11869077-b428-413f-9f8f-7eac08d2d9ec" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.208741] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "refresh_cache-11869077-b428-413f-9f8f-7eac08d2d9ec" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.208883] env[62619]: DEBUG nova.network.neutron [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1667.248735] env[62619]: INFO nova.compute.claims [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1667.257825] env[62619]: DEBUG nova.compute.utils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1667.267013] env[62619]: DEBUG nova.compute.manager [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1667.267244] env[62619]: DEBUG nova.network.neutron [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1667.278252] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fcdd9fb-af15-4c8b-8020-a3f7b4b32fc8 tempest-VolumesAdminNegativeTest-403242254 tempest-VolumesAdminNegativeTest-403242254-project-member] Lock "cef20063-96f0-46cc-9f7d-4436b60216c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.238s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.279869] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b046f00-48cb-4724-8aa1-5a778d320656 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.481s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.314032] env[62619]: DEBUG nova.policy [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f5836e62ab7440fa798f7bea287572e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed0964fc0c414168b3027730645f7ee8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1667.496776] env[62619]: DEBUG nova.network.neutron [req-1cd20688-cebb-47bd-b12e-d1539abc81cb req-71b335d8-3fcc-4d04-b2a1-528dfa43d234 service nova] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Updated VIF entry in instance network info cache for port e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1667.497104] env[62619]: DEBUG nova.network.neutron [req-1cd20688-cebb-47bd-b12e-d1539abc81cb req-71b335d8-3fcc-4d04-b2a1-528dfa43d234 service nova] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Updating instance_info_cache with network_info: [{"id": "e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20", "address": "fa:16:3e:8c:9f:34", "network": {"id": "ca1278c8-e89e-4d12-9b8d-c9a08690f05e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.124", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "6c0620dc847547f18c2b3b70b01d1230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6097edc-d3", "ovs_interfaceid": "e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1667.577827] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5225b8eb-83f9-ecb1-dbdd-a48c5466a6bd, 'name': SearchDatastore_Task, 'duration_secs': 0.009372} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.578154] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1667.578436] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1667.578693] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.578855] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.579050] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1667.579317] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58d1737b-9b32-4757-8409-82070f01505b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.587465] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1667.587648] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1667.588365] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce75bd93-4281-4fc2-b6f1-223d73883ab9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.593467] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Waiting for the task: (returnval){ [ 1667.593467] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52133c0c-f808-afce-6126-e93f99ea1a51" [ 1667.593467] env[62619]: _type = "Task" [ 1667.593467] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.600910] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52133c0c-f808-afce-6126-e93f99ea1a51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.763016] env[62619]: DEBUG nova.network.neutron [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1667.768659] env[62619]: INFO nova.compute.resource_tracker [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating resource usage from migration 0025d712-72a0-433a-9c2a-ce2a5e846a5e [ 1667.772126] env[62619]: DEBUG nova.compute.manager [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1667.967528] env[62619]: DEBUG nova.network.neutron [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Successfully created port: a779a2e4-0edc-4f00-ad83-a86f4cc102c8 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1667.999733] env[62619]: DEBUG oslo_concurrency.lockutils [req-1cd20688-cebb-47bd-b12e-d1539abc81cb req-71b335d8-3fcc-4d04-b2a1-528dfa43d234 service nova] Releasing lock "refresh_cache-20d62152-3859-4023-a11d-b17c76e1090a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.032611] env[62619]: DEBUG nova.network.neutron [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Updating instance_info_cache with network_info: [{"id": "6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81", "address": "fa:16:3e:a3:be:5f", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ee41f5a-d2", "ovs_interfaceid": "6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1668.107540] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52133c0c-f808-afce-6126-e93f99ea1a51, 'name': SearchDatastore_Task, 'duration_secs': 0.008256} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.110697] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52a45ebe-062c-42bf-88f4-11abb8b058e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.121714] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Waiting for the task: (returnval){ [ 1668.121714] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521ef81c-b550-1827-022d-873b38e2bb8f" [ 1668.121714] env[62619]: _type = "Task" [ 1668.121714] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.128789] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521ef81c-b550-1827-022d-873b38e2bb8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.159638] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa79df5-999b-4017-9a20-ed24c3dcb302 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.167603] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7b28da-40ff-49ca-aef4-567c0306a467 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.204857] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9df481-809c-472b-b4a7-89582bcc8429 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.217021] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cfd62d1-79a3-4221-98a4-dfe42bb4af1c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.230525] env[62619]: DEBUG nova.compute.provider_tree [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1668.444713] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.444991] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a" 
acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.445343] env[62619]: DEBUG nova.compute.manager [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Going to confirm migration 3 {{(pid=62619) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5157}} [ 1668.536657] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "refresh_cache-11869077-b428-413f-9f8f-7eac08d2d9ec" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.537052] env[62619]: DEBUG nova.compute.manager [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Instance network_info: |[{"id": "6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81", "address": "fa:16:3e:a3:be:5f", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ee41f5a-d2", "ovs_interfaceid": "6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1668.537829] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:be:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8ee8640-3787-4c27-9581-962ddb2be7e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1668.554994] env[62619]: DEBUG oslo.service.loopingcall [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1668.555277] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1668.556038] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d41a2f2-e0ac-4bb0-be81-865c60ffc1ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.579233] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1668.579233] env[62619]: value = "task-1777963" [ 1668.579233] env[62619]: _type = "Task" [ 1668.579233] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.597298] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777963, 'name': CreateVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.630750] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521ef81c-b550-1827-022d-873b38e2bb8f, 'name': SearchDatastore_Task, 'duration_secs': 0.009053} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.631127] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.631464] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 20d62152-3859-4023-a11d-b17c76e1090a/20d62152-3859-4023-a11d-b17c76e1090a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1668.631763] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-052244b8-309b-4368-8ff6-c5155650e824 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.639235] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Waiting for the task: (returnval){ [ 1668.639235] env[62619]: value = "task-1777964" [ 1668.639235] env[62619]: _type = "Task" [ 1668.639235] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.649034] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777964, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.651911] env[62619]: DEBUG nova.compute.manager [req-a5c068e5-468c-47cf-857f-80dce281d9de req-5ab75d9e-ad7f-4229-a660-9729654edee0 service nova] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Received event network-changed-6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1668.651911] env[62619]: DEBUG nova.compute.manager [req-a5c068e5-468c-47cf-857f-80dce281d9de req-5ab75d9e-ad7f-4229-a660-9729654edee0 service nova] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Refreshing instance network info cache due to event network-changed-6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1668.652178] env[62619]: DEBUG oslo_concurrency.lockutils [req-a5c068e5-468c-47cf-857f-80dce281d9de req-5ab75d9e-ad7f-4229-a660-9729654edee0 service nova] Acquiring lock "refresh_cache-11869077-b428-413f-9f8f-7eac08d2d9ec" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1668.652178] env[62619]: DEBUG oslo_concurrency.lockutils [req-a5c068e5-468c-47cf-857f-80dce281d9de req-5ab75d9e-ad7f-4229-a660-9729654edee0 service nova] Acquired lock "refresh_cache-11869077-b428-413f-9f8f-7eac08d2d9ec" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1668.652264] env[62619]: DEBUG nova.network.neutron [req-a5c068e5-468c-47cf-857f-80dce281d9de req-5ab75d9e-ad7f-4229-a660-9729654edee0 service nova] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Refreshing network info cache for port 6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1668.736392] env[62619]: DEBUG nova.scheduler.client.report [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1668.784520] env[62619]: DEBUG nova.compute.manager [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1668.813119] env[62619]: DEBUG nova.virt.hardware [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1668.813119] env[62619]: DEBUG nova.virt.hardware [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1668.813119] env[62619]: DEBUG nova.virt.hardware [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1668.813119] env[62619]: DEBUG nova.virt.hardware [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1668.813469] env[62619]: DEBUG nova.virt.hardware [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1668.813759] env[62619]: DEBUG nova.virt.hardware [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1668.814085] env[62619]: DEBUG nova.virt.hardware [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1668.814377] env[62619]: DEBUG nova.virt.hardware [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1668.814663] env[62619]: DEBUG nova.virt.hardware [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1668.815190] env[62619]: DEBUG nova.virt.hardware [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1668.815622] env[62619]: DEBUG nova.virt.hardware [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1668.816938] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba144e9-4358-4f06-88b5-0d201a46d0bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.826890] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e7046c-3ff4-4238-b111-e4a76590ebeb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.073885] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.075943] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.075943] env[62619]: DEBUG nova.network.neutron [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1669.075943] env[62619]: DEBUG nova.objects.instance [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'info_cache' on Instance uuid da806d3f-79f0-4188-a2d8-0beeb9dfec1a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1669.089979] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777963, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.150318] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777964, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.243651] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.506s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.245486] env[62619]: INFO nova.compute.manager [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Migrating [ 1669.255641] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.321s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.258380] env[62619]: INFO nova.compute.claims [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1669.268989] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.268989] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.268989] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.268989] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.268989] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.269312] env[62619]: INFO nova.compute.manager [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Terminating instance [ 1669.391334] env[62619]: DEBUG nova.network.neutron [req-a5c068e5-468c-47cf-857f-80dce281d9de req-5ab75d9e-ad7f-4229-a660-9729654edee0 service nova] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Updated VIF entry in instance network info cache for port 6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1669.391856] env[62619]: DEBUG nova.network.neutron [req-a5c068e5-468c-47cf-857f-80dce281d9de req-5ab75d9e-ad7f-4229-a660-9729654edee0 service nova] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Updating instance_info_cache with network_info: [{"id": "6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81", "address": "fa:16:3e:a3:be:5f", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ee41f5a-d2", "ovs_interfaceid": "6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1669.590933] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777963, 'name': CreateVM_Task, 'duration_secs': 0.526845} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.590933] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1669.591689] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.591967] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.592030] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1669.592278] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc716aca-abc1-4d38-bebe-b1c1e1428cd7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.597277] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1669.597277] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52779c49-5671-510a-d520-720e6b1b6737" [ 1669.597277] env[62619]: _type = "Task" [ 1669.597277] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.606826] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52779c49-5671-510a-d520-720e6b1b6737, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.650407] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777964, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.535591} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.650698] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 20d62152-3859-4023-a11d-b17c76e1090a/20d62152-3859-4023-a11d-b17c76e1090a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1669.652077] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1669.652077] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1fb9855-4309-489f-8fa9-40aef7909c94 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.658791] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Waiting for the task: (returnval){ [ 1669.658791] env[62619]: value = "task-1777965" [ 1669.658791] env[62619]: _type = "Task" [ 1669.658791] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.668033] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777965, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.766702] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "refresh_cache-cbff225f-2d11-4a43-a320-95dd3afb8e48" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.766876] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "refresh_cache-cbff225f-2d11-4a43-a320-95dd3afb8e48" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.767808] env[62619]: DEBUG nova.network.neutron [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1669.783397] env[62619]: DEBUG nova.compute.manager [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1669.783621] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1669.784705] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e86272-4d9f-47c2-88ca-f89fde3da810 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.793430] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1669.793792] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5769b8f3-91d0-4a79-8582-5bba1769214c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.800638] env[62619]: DEBUG oslo_vmware.api [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1669.800638] env[62619]: value = "task-1777966" [ 1669.800638] env[62619]: _type = "Task" [ 1669.800638] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.809409] env[62619]: DEBUG oslo_vmware.api [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777966, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.894941] env[62619]: DEBUG oslo_concurrency.lockutils [req-a5c068e5-468c-47cf-857f-80dce281d9de req-5ab75d9e-ad7f-4229-a660-9729654edee0 service nova] Releasing lock "refresh_cache-11869077-b428-413f-9f8f-7eac08d2d9ec" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1669.969831] env[62619]: DEBUG nova.compute.manager [req-86040b26-a5d7-4b6f-956a-06d0055bb888 req-393dc074-f9c4-434a-a974-68978fff3291 service nova] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Received event network-vif-plugged-a779a2e4-0edc-4f00-ad83-a86f4cc102c8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1669.970216] env[62619]: DEBUG oslo_concurrency.lockutils [req-86040b26-a5d7-4b6f-956a-06d0055bb888 req-393dc074-f9c4-434a-a974-68978fff3291 service nova] Acquiring lock "cb33580e-d70d-4557-98fe-e673d93f3307-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.970459] env[62619]: DEBUG oslo_concurrency.lockutils [req-86040b26-a5d7-4b6f-956a-06d0055bb888 req-393dc074-f9c4-434a-a974-68978fff3291 service nova] Lock "cb33580e-d70d-4557-98fe-e673d93f3307-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.970666] env[62619]: DEBUG oslo_concurrency.lockutils [req-86040b26-a5d7-4b6f-956a-06d0055bb888 req-393dc074-f9c4-434a-a974-68978fff3291 service nova] Lock "cb33580e-d70d-4557-98fe-e673d93f3307-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.971050] env[62619]: DEBUG nova.compute.manager [req-86040b26-a5d7-4b6f-956a-06d0055bb888 req-393dc074-f9c4-434a-a974-68978fff3291 service nova] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] No waiting events found dispatching network-vif-plugged-a779a2e4-0edc-4f00-ad83-a86f4cc102c8 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1669.971255] env[62619]: WARNING nova.compute.manager [req-86040b26-a5d7-4b6f-956a-06d0055bb888 req-393dc074-f9c4-434a-a974-68978fff3291 service nova] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Received unexpected event network-vif-plugged-a779a2e4-0edc-4f00-ad83-a86f4cc102c8 for instance with vm_state building and task_state spawning. 
[ 1670.083649] env[62619]: DEBUG nova.network.neutron [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Successfully updated port: a779a2e4-0edc-4f00-ad83-a86f4cc102c8 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1670.110205] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52779c49-5671-510a-d520-720e6b1b6737, 'name': SearchDatastore_Task, 'duration_secs': 0.009671} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.110733] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1670.111154] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1670.111605] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.111767] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.111949] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1670.112217] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6de50ab4-56ee-4d44-8ea6-57f030023af4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.120819] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
1670.120819] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1670.122091] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7551f3fc-461f-48e6-a45b-7f51e06257c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.127817] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1670.127817] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e81b5d-c3b2-054c-b70a-bc3c2013d89f" [ 1670.127817] env[62619]: _type = "Task" [ 1670.127817] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.136143] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e81b5d-c3b2-054c-b70a-bc3c2013d89f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.169148] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777965, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063111} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.169403] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1670.170198] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b170b9b4-2906-4201-8f21-4f467d13bd8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.194741] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 20d62152-3859-4023-a11d-b17c76e1090a/20d62152-3859-4023-a11d-b17c76e1090a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1670.195075] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-746b123c-2725-47c6-8818-2fcea437b13a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.218144] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Waiting for the task: (returnval){ [ 1670.218144] env[62619]: value = "task-1777967" [ 1670.218144] env[62619]: _type = "Task" [ 1670.218144] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.226442] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777967, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.313758] env[62619]: DEBUG oslo_vmware.api [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777966, 'name': PowerOffVM_Task, 'duration_secs': 0.219034} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.314058] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1670.314282] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1670.314487] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a177a88b-da2a-4a03-859f-3bd56871f345 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.401266] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1670.401608] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1670.401725] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Deleting the datastore file [datastore1] aa4906f1-e801-4df0-819e-8c5fb5930fb5 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1670.402632] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ee32744-611f-4735-95b9-0e02edf1286c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.413039] env[62619]: DEBUG oslo_vmware.api [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for the task: (returnval){ [ 1670.413039] env[62619]: value = "task-1777969" [ 1670.413039] env[62619]: _type = "Task" [ 1670.413039] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.420154] env[62619]: DEBUG oslo_vmware.api [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777969, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.445262] env[62619]: DEBUG nova.network.neutron [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance_info_cache with network_info: [{"id": "7df13a55-7d53-41b9-9489-591516bda30c", "address": "fa:16:3e:aa:15:ba", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df13a55-7d", "ovs_interfaceid": "7df13a55-7d53-41b9-9489-591516bda30c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1670.587997] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "refresh_cache-cb33580e-d70d-4557-98fe-e673d93f3307" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.588181] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquired lock "refresh_cache-cb33580e-d70d-4557-98fe-e673d93f3307" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.588434] env[62619]: DEBUG nova.network.neutron [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1670.635331] env[62619]: DEBUG nova.network.neutron [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating instance_info_cache with network_info: [{"id": "f289fb71-1285-4a29-9580-10815cd08cba", "address": "fa:16:3e:bf:f4:72", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf289fb71-12", "ovs_interfaceid": "f289fb71-1285-4a29-9580-10815cd08cba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1670.643129] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e81b5d-c3b2-054c-b70a-bc3c2013d89f, 'name': SearchDatastore_Task, 'duration_secs': 0.008657} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.646250] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b00e6c1-9dbe-4464-8cbf-863962e894d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.651806] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1670.651806] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5216ffd3-bd54-93f4-112b-76f103bff71d" [ 1670.651806] env[62619]: _type = "Task" [ 1670.651806] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.657383] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7375f60-7555-4594-88ca-36cf4e7fa56c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.662674] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5216ffd3-bd54-93f4-112b-76f103bff71d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.667788] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b76383f-2c00-4bb8-8cc4-126875198266 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.699708] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393d180e-af84-4882-93d6-d64a1bc15623 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.707488] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5721cf2c-5f11-4581-9d23-c02f2914477e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.721338] env[62619]: DEBUG nova.compute.provider_tree [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1670.730670] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777967, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.921539] env[62619]: DEBUG oslo_vmware.api [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Task: {'id': task-1777969, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168419} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.921848] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1670.922054] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1670.922234] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1670.922409] env[62619]: INFO nova.compute.manager [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 1670.922644] env[62619]: DEBUG oslo.service.loopingcall [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1670.923016] env[62619]: DEBUG nova.compute.manager [-] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1670.923183] env[62619]: DEBUG nova.network.neutron [-] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1670.949734] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1670.949986] env[62619]: DEBUG nova.objects.instance [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'migration_context' on Instance uuid da806d3f-79f0-4188-a2d8-0beeb9dfec1a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1671.125223] env[62619]: DEBUG nova.network.neutron [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1671.138605] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "refresh_cache-cbff225f-2d11-4a43-a320-95dd3afb8e48" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.166749] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5216ffd3-bd54-93f4-112b-76f103bff71d, 'name': SearchDatastore_Task, 'duration_secs': 0.014721} completed successfully.
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.170487] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.170967] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 11869077-b428-413f-9f8f-7eac08d2d9ec/11869077-b428-413f-9f8f-7eac08d2d9ec.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1671.171402] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f34b85b-4ec8-4ce2-9d0f-43c654495af2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.180925] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1671.180925] env[62619]: value = "task-1777970" [ 1671.180925] env[62619]: _type = "Task" [ 1671.180925] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.191723] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777970, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.228309] env[62619]: DEBUG nova.scheduler.client.report [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1671.234530] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777967, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.361374] env[62619]: DEBUG nova.network.neutron [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Updating instance_info_cache with network_info: [{"id": "a779a2e4-0edc-4f00-ad83-a86f4cc102c8", "address": "fa:16:3e:e6:5c:db", "network": {"id": "15f89bb2-20f5-4ac8-8688-bfeed19bc7c0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1093042133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed0964fc0c414168b3027730645f7ee8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa779a2e4-0e", "ovs_interfaceid": "a779a2e4-0edc-4f00-ad83-a86f4cc102c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.422344] env[62619]: DEBUG nova.compute.manager [req-0930b687-29af-4852-adfe-b23909d9fd88 req-2330c69f-ed09-4bd6-b571-1bfb2ad5affb service nova] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Received event network-vif-deleted-5530a947-e30a-4156-be0f-8e8dd90d2aef {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1671.422562] env[62619]: INFO nova.compute.manager [req-0930b687-29af-4852-adfe-b23909d9fd88 req-2330c69f-ed09-4bd6-b571-1bfb2ad5affb service nova] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Neutron deleted interface 5530a947-e30a-4156-be0f-8e8dd90d2aef; detaching it from the instance and deleting it from the info cache [ 1671.423340] env[62619]: DEBUG nova.network.neutron [req-0930b687-29af-4852-adfe-b23909d9fd88 req-2330c69f-ed09-4bd6-b571-1bfb2ad5affb service nova] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.458577] env[62619]: DEBUG nova.objects.base [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1671.459446] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc43298-6dea-4dff-b024-3e5246df41fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.482274] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4240358-8a25-49c0-8c54-551eb360906f {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.488886] env[62619]: DEBUG oslo_vmware.api [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1671.488886] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a38af4-e589-c9e8-8d47-b3e6b34a401c" [ 1671.488886] env[62619]: _type = "Task" [ 1671.488886] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.498733] env[62619]: DEBUG oslo_vmware.api [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a38af4-e589-c9e8-8d47-b3e6b34a401c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.692035] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777970, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.732826] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777967, 'name': ReconfigVM_Task, 'duration_secs': 1.409487} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.733205] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 20d62152-3859-4023-a11d-b17c76e1090a/20d62152-3859-4023-a11d-b17c76e1090a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1671.733942] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa0ca742-86ce-4c70-beb8-4a959fdf1bfb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.736378] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.736835] env[62619]: DEBUG nova.compute.manager [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1671.739545] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.162s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.740902] env[62619]: INFO nova.compute.claims [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1671.747402] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Waiting for the task: (returnval){ [ 1671.747402] env[62619]: value = "task-1777971" [ 1671.747402] env[62619]: _type = "Task" [ 1671.747402] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.755525] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777971, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.795593] env[62619]: DEBUG nova.network.neutron [-] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.864858] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Releasing lock "refresh_cache-cb33580e-d70d-4557-98fe-e673d93f3307" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.865176] env[62619]: DEBUG nova.compute.manager [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Instance network_info: |[{"id": "a779a2e4-0edc-4f00-ad83-a86f4cc102c8", "address": "fa:16:3e:e6:5c:db", "network": {"id": "15f89bb2-20f5-4ac8-8688-bfeed19bc7c0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1093042133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed0964fc0c414168b3027730645f7ee8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa779a2e4-0e", 
"ovs_interfaceid": "a779a2e4-0edc-4f00-ad83-a86f4cc102c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1671.865645] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:5c:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a779a2e4-0edc-4f00-ad83-a86f4cc102c8', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1671.873662] env[62619]: DEBUG oslo.service.loopingcall [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1671.874103] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1671.874218] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-481f5888-7fc1-48f9-a6d6-3f5398facb41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.898217] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1671.898217] env[62619]: value = "task-1777972" [ 1671.898217] env[62619]: _type = "Task" [ 1671.898217] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.906161] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777972, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.927962] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-966c2a25-5639-4785-90f9-1d5d0aa67a30 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.937424] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58187e8f-1b79-4600-a97a-55fba69a8aa3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.971214] env[62619]: DEBUG nova.compute.manager [req-0930b687-29af-4852-adfe-b23909d9fd88 req-2330c69f-ed09-4bd6-b571-1bfb2ad5affb service nova] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Detach interface failed, port_id=5530a947-e30a-4156-be0f-8e8dd90d2aef, reason: Instance aa4906f1-e801-4df0-819e-8c5fb5930fb5 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1671.999127] env[62619]: DEBUG oslo_vmware.api [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a38af4-e589-c9e8-8d47-b3e6b34a401c, 'name': SearchDatastore_Task, 'duration_secs': 0.03032} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.999470] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.047239] env[62619]: DEBUG nova.compute.manager [req-741776ff-8c83-48fa-913e-65c29d95121e req-ad8f7ad5-b3ef-419d-b4b4-a3d2ef68596f service nova] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Received event network-changed-a779a2e4-0edc-4f00-ad83-a86f4cc102c8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1672.047239] env[62619]: DEBUG nova.compute.manager [req-741776ff-8c83-48fa-913e-65c29d95121e req-ad8f7ad5-b3ef-419d-b4b4-a3d2ef68596f service nova] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Refreshing instance network info cache due to event network-changed-a779a2e4-0edc-4f00-ad83-a86f4cc102c8. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1672.047239] env[62619]: DEBUG oslo_concurrency.lockutils [req-741776ff-8c83-48fa-913e-65c29d95121e req-ad8f7ad5-b3ef-419d-b4b4-a3d2ef68596f service nova] Acquiring lock "refresh_cache-cb33580e-d70d-4557-98fe-e673d93f3307" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.047239] env[62619]: DEBUG oslo_concurrency.lockutils [req-741776ff-8c83-48fa-913e-65c29d95121e req-ad8f7ad5-b3ef-419d-b4b4-a3d2ef68596f service nova] Acquired lock "refresh_cache-cb33580e-d70d-4557-98fe-e673d93f3307" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.047392] env[62619]: DEBUG nova.network.neutron [req-741776ff-8c83-48fa-913e-65c29d95121e req-ad8f7ad5-b3ef-419d-b4b4-a3d2ef68596f service nova] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Refreshing network info cache for port a779a2e4-0edc-4f00-ad83-a86f4cc102c8 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1672.193741] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777970, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552195} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.194093] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 11869077-b428-413f-9f8f-7eac08d2d9ec/11869077-b428-413f-9f8f-7eac08d2d9ec.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1672.194352] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1672.194664] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3188f13e-700b-4a97-becf-bc78615fe39c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.202196] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1672.202196] env[62619]: value = "task-1777973" [ 1672.202196] env[62619]: _type = "Task" [ 1672.202196] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.210689] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777973, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.245902] env[62619]: DEBUG nova.compute.utils [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1672.249396] env[62619]: DEBUG nova.compute.manager [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Not allocating networking since 'none' was specified. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1672.259273] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777971, 'name': Rename_Task, 'duration_secs': 0.164559} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.260635] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1672.260790] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7c95683-36ac-43e7-b1b9-56cf14e13d67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.269205] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Waiting for the task: (returnval){ [ 1672.269205] env[62619]: value = "task-1777974" [ 1672.269205] env[62619]: _type = "Task" [ 1672.269205] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.278513] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777974, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.299144] env[62619]: INFO nova.compute.manager [-] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Took 1.38 seconds to deallocate network for instance. [ 1672.409895] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777972, 'name': CreateVM_Task, 'duration_secs': 0.508915} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.411077] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1672.411077] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.411250] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.411572] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1672.411882] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc9fbd8d-e2ce-4990-83b6-e2b459842b4a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.417053] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1672.417053] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fbb16d-bfc8-cd24-1ab7-11c22d4f21a2" [ 1672.417053] env[62619]: _type = "Task" [ 1672.417053] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.427157] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fbb16d-bfc8-cd24-1ab7-11c22d4f21a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.655457] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40960780-2a27-4b3f-9f7f-9c72530af179 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.678852] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating instance 'cbff225f-2d11-4a43-a320-95dd3afb8e48' progress to 0 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1672.711653] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777973, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087789} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.711918] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1672.712883] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09198ed4-ecea-4203-b6f6-450513ad7ef6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.737054] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 11869077-b428-413f-9f8f-7eac08d2d9ec/11869077-b428-413f-9f8f-7eac08d2d9ec.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1672.740525] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd58ac19-ab4b-40e3-bfb5-2a1cadcb1501 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.756017] env[62619]: DEBUG nova.compute.manager [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1672.770656] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1672.770656] env[62619]: value = "task-1777975" [ 1672.770656] env[62619]: _type = "Task" [ 1672.770656] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.782425] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777974, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.785418] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777975, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.807928] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.884240] env[62619]: DEBUG nova.network.neutron [req-741776ff-8c83-48fa-913e-65c29d95121e req-ad8f7ad5-b3ef-419d-b4b4-a3d2ef68596f service nova] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Updated VIF entry in instance network info cache for port a779a2e4-0edc-4f00-ad83-a86f4cc102c8. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1672.884655] env[62619]: DEBUG nova.network.neutron [req-741776ff-8c83-48fa-913e-65c29d95121e req-ad8f7ad5-b3ef-419d-b4b4-a3d2ef68596f service nova] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Updating instance_info_cache with network_info: [{"id": "a779a2e4-0edc-4f00-ad83-a86f4cc102c8", "address": "fa:16:3e:e6:5c:db", "network": {"id": "15f89bb2-20f5-4ac8-8688-bfeed19bc7c0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1093042133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed0964fc0c414168b3027730645f7ee8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa779a2e4-0e", "ovs_interfaceid": "a779a2e4-0edc-4f00-ad83-a86f4cc102c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1672.926945] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fbb16d-bfc8-cd24-1ab7-11c22d4f21a2, 'name': SearchDatastore_Task, 'duration_secs': 0.012373} completed 
successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.929709] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.930041] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1672.930333] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.930499] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.930706] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1672.932027] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d42f3322-fd74-471f-837a-ae2e5c708c33 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.942177] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1672.942393] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1672.945845] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25599aaa-6518-4c25-939c-ab1b8adbc155 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.952277] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1672.952277] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52411037-53ee-35eb-871f-2c91183b1285" [ 1672.952277] env[62619]: _type = "Task" [ 1672.952277] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.960316] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52411037-53ee-35eb-871f-2c91183b1285, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.114836] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40dce238-6dad-481f-b029-fb52140dc5a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.122583] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798c9fbc-7ed8-41c4-a56a-ab92ff584c56 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.153708] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10da6b4d-ba67-4926-9557-c8023e2ad54a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.161361] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec91f9c3-5f8f-4ff8-90e0-db3d224b9a64 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.176158] env[62619]: DEBUG nova.compute.provider_tree [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1673.188058] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1673.188603] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d4c9a80-9bd8-4a20-8278-22cfdb3e2139 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.195417] env[62619]: DEBUG oslo_vmware.api [None 
req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1673.195417] env[62619]: value = "task-1777976" [ 1673.195417] env[62619]: _type = "Task" [ 1673.195417] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.204889] env[62619]: DEBUG oslo_vmware.api [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777976, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.280521] env[62619]: DEBUG oslo_vmware.api [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777974, 'name': PowerOnVM_Task, 'duration_secs': 0.808423} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.281246] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1673.281511] env[62619]: INFO nova.compute.manager [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Took 9.39 seconds to spawn the instance on the hypervisor. [ 1673.281758] env[62619]: DEBUG nova.compute.manager [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1673.282595] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8791222c-c012-49f7-9c37-809577866643 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.288166] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777975, 'name': ReconfigVM_Task, 'duration_secs': 0.47412} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.288815] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 11869077-b428-413f-9f8f-7eac08d2d9ec/11869077-b428-413f-9f8f-7eac08d2d9ec.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1673.289561] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67641a9d-9242-467d-a584-ee8220446ed1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.299893] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1673.299893] env[62619]: value = "task-1777977" [ 1673.299893] env[62619]: _type = "Task" [ 1673.299893] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.308099] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777977, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.388822] env[62619]: DEBUG oslo_concurrency.lockutils [req-741776ff-8c83-48fa-913e-65c29d95121e req-ad8f7ad5-b3ef-419d-b4b4-a3d2ef68596f service nova] Releasing lock "refresh_cache-cb33580e-d70d-4557-98fe-e673d93f3307" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.463875] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52411037-53ee-35eb-871f-2c91183b1285, 'name': SearchDatastore_Task, 'duration_secs': 0.009762} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.464688] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e907d447-bf2c-4e6f-b38f-cb8ef1c4dbd4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.470155] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1673.470155] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527a5ae7-fc18-06e3-7f42-29d1293bf87e" [ 1673.470155] env[62619]: _type = "Task" [ 1673.470155] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.478098] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527a5ae7-fc18-06e3-7f42-29d1293bf87e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.679073] env[62619]: DEBUG nova.scheduler.client.report [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1673.706258] env[62619]: DEBUG oslo_vmware.api [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777976, 'name': PowerOffVM_Task, 'duration_secs': 0.199178} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.706749] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1673.707109] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating instance 'cbff225f-2d11-4a43-a320-95dd3afb8e48' progress to 17 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1673.767764] env[62619]: DEBUG nova.compute.manager [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1673.793630] env[62619]: DEBUG nova.virt.hardware [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1673.793874] env[62619]: DEBUG nova.virt.hardware [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1673.794042] env[62619]: DEBUG nova.virt.hardware [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1673.794229] env[62619]: DEBUG nova.virt.hardware [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1673.794374] env[62619]: DEBUG nova.virt.hardware [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1673.794519] env[62619]: DEBUG nova.virt.hardware [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1673.794723] env[62619]: DEBUG nova.virt.hardware [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1673.794878] env[62619]: DEBUG nova.virt.hardware [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1673.795052] env[62619]: DEBUG nova.virt.hardware [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 
tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1673.795214] env[62619]: DEBUG nova.virt.hardware [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1673.795413] env[62619]: DEBUG nova.virt.hardware [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1673.796355] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e56826-3c59-4c13-aafe-b16f0b129c90 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.808872] env[62619]: INFO nova.compute.manager [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Took 49.39 seconds to build instance. [ 1673.813454] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7443d7dc-7819-4760-9a5b-05ae89d044e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.820471] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777977, 'name': Rename_Task, 'duration_secs': 0.220153} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.821061] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1673.821412] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b9f78ec-99d3-4aa2-8bbe-986bc8c0576d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.830950] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1673.836309] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Creating folder: Project (7aae1d7a02e74348b38928dcc204243f). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1673.837570] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c703b4ed-1e29-482a-8dc6-79b3d3b1d5df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.842033] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1673.842033] env[62619]: value = "task-1777978" [ 1673.842033] env[62619]: _type = "Task" [ 1673.842033] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.847068] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Created folder: Project (7aae1d7a02e74348b38928dcc204243f) in parent group-v368875. [ 1673.847252] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Creating folder: Instances. Parent ref: group-v369082. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1673.850371] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4eccf307-953a-4041-a137-5710e402661e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.852037] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777978, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.859749] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Created folder: Instances in parent group-v369082. [ 1673.859979] env[62619]: DEBUG oslo.service.loopingcall [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1673.860180] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1673.860386] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ba2828c-3c8b-4c9f-8a42-1a1c14d833fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.877200] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1673.877200] env[62619]: value = "task-1777981" [ 1673.877200] env[62619]: _type = "Task" [ 1673.877200] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.885070] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777981, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.980862] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527a5ae7-fc18-06e3-7f42-29d1293bf87e, 'name': SearchDatastore_Task, 'duration_secs': 0.009195} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.981187] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.981397] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] cb33580e-d70d-4557-98fe-e673d93f3307/cb33580e-d70d-4557-98fe-e673d93f3307.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1673.981653] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4aeb634-eba4-4dbf-8e47-5558812e2995 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.988426] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1673.988426] env[62619]: value = "task-1777982" [ 1673.988426] env[62619]: _type = "Task" [ 1673.988426] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.997095] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777982, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.187129] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.447s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.187823] env[62619]: DEBUG nova.compute.manager [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1674.195453] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.680s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.195730] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.198316] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.033s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.200509] env[62619]: INFO nova.compute.claims [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1674.214281] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1674.214588] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 
tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1674.214803] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1674.218977] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1674.219525] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1674.219525] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1674.219647] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1674.219807] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1674.220030] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1674.220164] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1674.220341] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1674.225689] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36265e9d-9108-4eae-ac81-633fd3459ff4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.245098] env[62619]: 
DEBUG oslo_vmware.api [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1674.245098] env[62619]: value = "task-1777983" [ 1674.245098] env[62619]: _type = "Task" [ 1674.245098] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.246267] env[62619]: INFO nova.scheduler.client.report [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleted allocations for instance eca829be-d425-4668-9ebd-1247c5ff19d0 [ 1674.259520] env[62619]: DEBUG oslo_vmware.api [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777983, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.312758] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fe5d096-e55d-4244-bb4b-46d706800416 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Lock "20d62152-3859-4023-a11d-b17c76e1090a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.896s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.356885] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777978, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.388309] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1777981, 'name': CreateVM_Task, 'duration_secs': 0.321215} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.388523] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1674.388946] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.389185] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.389520] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1674.389783] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdec1a36-3dcd-4e1c-8e11-f98bdbad04b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.397055] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1674.397055] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bf8135-c497-5de1-a101-f69c80678658" [ 1674.397055] env[62619]: _type = "Task" [ 1674.397055] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.406694] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bf8135-c497-5de1-a101-f69c80678658, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.499539] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777982, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.449269} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.499820] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] cb33580e-d70d-4557-98fe-e673d93f3307/cb33580e-d70d-4557-98fe-e673d93f3307.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1674.500036] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1674.500377] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c5e0203-9f83-494d-acd0-618c839f0fd1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.509356] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1674.509356] env[62619]: value = "task-1777984" [ 1674.509356] env[62619]: _type = "Task" [ 1674.509356] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.520565] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777984, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.708236] env[62619]: DEBUG nova.compute.utils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1674.712980] env[62619]: DEBUG nova.compute.manager [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1674.713174] env[62619]: DEBUG nova.network.neutron [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1674.757102] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3317e3af-f833-48a9-a604-4e5980c9621b tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "eca829be-d425-4668-9ebd-1247c5ff19d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.794s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.765651] env[62619]: DEBUG oslo_vmware.api [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777983, 'name': ReconfigVM_Task, 'duration_secs': 0.354533} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.765651] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating instance 'cbff225f-2d11-4a43-a320-95dd3afb8e48' progress to 33 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1674.778161] env[62619]: DEBUG nova.policy [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0cde3ba9ee004055bb5e09bc932dc4f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0948c27a2b08413ba82d553452965c9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1674.853595] env[62619]: DEBUG oslo_vmware.api [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1777978, 'name': PowerOnVM_Task, 'duration_secs': 0.689667} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.854073] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1674.854073] env[62619]: INFO nova.compute.manager [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Took 8.46 seconds to spawn the instance on the hypervisor. 
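Annotation (editor's note, not part of the captured log): the PowerOnVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task and CreateVM_Task entries above all follow the same oslo.vmware pattern: invoke a *_Task method on a managed object, then poll the returned task until it completes. A minimal sketch of that pattern follows; the vCenter host, credentials, the VM moref value and the constructor keyword names are illustrative assumptions, not values taken from this log.

    # Sketch only: the call pattern behind the "Invoking <object>.<Method>_Task",
    # "Waiting for the task" and "Task ... completed successfully" lines above.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.test',                 # placeholder vCenter host
        'administrator@vsphere.local',     # placeholder user
        'secret',                          # placeholder password
        api_retry_count=10,                # keyword names assumed, see note above
        task_poll_interval=0.5)            # frequent polls -> "progress is 0%" lines

    # Managed-object reference for an existing VM (placeholder value).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Emits "Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-..."
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Polls the task (the _poll_task DEBUG lines) and raises if it ends in error.
    task_info = session.wait_for_task(task)

The "completed successfully" entries that report duration_secs (e.g. 0.808423 for task-1777974) are emitted from this same _poll_task path once the task reaches a terminal state.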
[ 1674.854310] env[62619]: DEBUG nova.compute.manager [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1674.855089] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95361d4-f3eb-481f-94f5-afc26b021c18 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.907614] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bf8135-c497-5de1-a101-f69c80678658, 'name': SearchDatastore_Task, 'duration_secs': 0.030026} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.907921] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.908881] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1674.909181] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.909333] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.909513] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1674.909783] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2015e15a-167b-46e3-9ad2-85c1c010a635 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.918237] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Created directory 
with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1674.918913] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1674.919110] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5565bb78-ca5a-43ed-b9ac-b8b6e3b388c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.925111] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1674.925111] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520cf275-cc6a-52ec-109d-05ad68f43e1a" [ 1674.925111] env[62619]: _type = "Task" [ 1674.925111] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.934193] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520cf275-cc6a-52ec-109d-05ad68f43e1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.019913] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777984, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080533} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.020410] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1675.021301] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d79d24-f06a-4d21-96e5-14b7840168ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.044270] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] cb33580e-d70d-4557-98fe-e673d93f3307/cb33580e-d70d-4557-98fe-e673d93f3307.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1675.044756] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e3656d0-119b-4744-9fb4-0395421b081b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.067018] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1675.067018] env[62619]: value = "task-1777985" [ 1675.067018] env[62619]: _type = "Task" [ 1675.067018] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.080090] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777985, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.211739] env[62619]: DEBUG nova.compute.manager [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1675.278101] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1675.278342] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1675.278497] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1675.278677] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1675.278815] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1675.278958] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1675.279202] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1675.279453] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1675.279553] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 
tempest-ServerDiskConfigTestJSON-1493618009-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1675.281407] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1675.281407] env[62619]: DEBUG nova.virt.hardware [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1675.285810] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Reconfiguring VM instance instance-00000046 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1675.287985] env[62619]: DEBUG nova.network.neutron [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Successfully created port: 24c2e90b-5ba7-4ae7-92a8-0666ce5bd797 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1675.294131] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9cb88545-18a9-461f-8524-731e9d6c77ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.329672] env[62619]: DEBUG oslo_vmware.api [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1675.329672] env[62619]: value = "task-1777986" [ 1675.329672] env[62619]: _type = "Task" [ 1675.329672] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.341515] env[62619]: DEBUG oslo_vmware.api [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777986, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.379138] env[62619]: INFO nova.compute.manager [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Took 39.03 seconds to build instance. [ 1675.436346] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520cf275-cc6a-52ec-109d-05ad68f43e1a, 'name': SearchDatastore_Task, 'duration_secs': 0.00877} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.440010] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8828c754-9071-4660-a59d-dea27748a80a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.445306] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1675.445306] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ed5fe8-1647-8d31-5c41-e67e79abad12" [ 1675.445306] env[62619]: _type = "Task" [ 1675.445306] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.457152] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ed5fe8-1647-8d31-5c41-e67e79abad12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.577909] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777985, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.582925] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea3561a-a577-49af-b90d-7cb55ba95dd8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.590589] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8771faf-5376-4bd5-b1ff-b1c7f95df798 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.623974] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00224f39-7976-4858-961b-7ae364d604b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.630814] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6794b1e-e577-45ec-a189-e5828a9a265b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.650024] env[62619]: DEBUG nova.compute.provider_tree [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1675.819406] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Acquiring lock "20d62152-3859-4023-a11d-b17c76e1090a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" 
{{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.819688] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Lock "20d62152-3859-4023-a11d-b17c76e1090a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.819892] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Acquiring lock "20d62152-3859-4023-a11d-b17c76e1090a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.820086] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Lock "20d62152-3859-4023-a11d-b17c76e1090a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.820256] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Lock "20d62152-3859-4023-a11d-b17c76e1090a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.825262] env[62619]: INFO nova.compute.manager [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Terminating instance [ 1675.843116] env[62619]: DEBUG oslo_vmware.api [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777986, 'name': ReconfigVM_Task, 'duration_secs': 0.357561} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.843984] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Reconfigured VM instance instance-00000046 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1675.844863] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e45ee2-e52b-405c-82e9-801b2f0c5d58 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.874683] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] cbff225f-2d11-4a43-a320-95dd3afb8e48/cbff225f-2d11-4a43-a320-95dd3afb8e48.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1675.875383] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b3ffcc4-a7dc-41dd-add8-19f7bad1f68a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.892060] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed670006-ed5b-4db1-9b50-45d73ba4a5b0 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "11869077-b428-413f-9f8f-7eac08d2d9ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.550s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.897929] env[62619]: DEBUG oslo_vmware.api [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1675.897929] env[62619]: value = "task-1777987" [ 1675.897929] env[62619]: _type = "Task" [ 1675.897929] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.908689] env[62619]: DEBUG oslo_vmware.api [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777987, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.958264] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ed5fe8-1647-8d31-5c41-e67e79abad12, 'name': SearchDatastore_Task, 'duration_secs': 0.011212} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.958626] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.961109] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8c296f2c-3e47-4431-b0c0-f7f1706c4a12/8c296f2c-3e47-4431-b0c0-f7f1706c4a12.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1675.961109] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-26bff444-069d-4972-867e-c985a1f9b0b0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.966561] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1675.966561] env[62619]: value = "task-1777988" [ 1675.966561] env[62619]: _type = "Task" [ 1675.966561] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.975071] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1777988, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.076934] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777985, 'name': ReconfigVM_Task, 'duration_secs': 0.639435} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.077272] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Reconfigured VM instance instance-0000004c to attach disk [datastore1] cb33580e-d70d-4557-98fe-e673d93f3307/cb33580e-d70d-4557-98fe-e673d93f3307.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1676.078236] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e83ba94-d9f1-4b74-86ec-e7258be6b068 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.084572] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1676.084572] env[62619]: value = "task-1777989" [ 1676.084572] env[62619]: _type = "Task" [ 1676.084572] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.094446] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777989, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.150381] env[62619]: DEBUG nova.scheduler.client.report [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1676.226176] env[62619]: DEBUG nova.compute.manager [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1676.262967] env[62619]: DEBUG nova.virt.hardware [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1676.263226] env[62619]: DEBUG nova.virt.hardware [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1676.263405] env[62619]: DEBUG nova.virt.hardware [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1676.263646] env[62619]: DEBUG nova.virt.hardware [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1676.263820] env[62619]: DEBUG nova.virt.hardware [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1676.264014] env[62619]: DEBUG nova.virt.hardware [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1676.264327] env[62619]: DEBUG nova.virt.hardware [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1676.264833] env[62619]: DEBUG nova.virt.hardware [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1676.264952] env[62619]: DEBUG nova.virt.hardware [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Got 1 possible topologies 
{{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1676.265568] env[62619]: DEBUG nova.virt.hardware [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1676.265568] env[62619]: DEBUG nova.virt.hardware [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1676.266362] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08fd981c-3732-4615-b91f-d30f2ae67647 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.277628] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b936d3a5-48ed-433a-97e2-aef925d37044 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.331438] env[62619]: DEBUG nova.compute.manager [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1676.331438] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1676.331835] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b1ce9d-d861-4b0f-aa96-bd9ec8258436 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.339861] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1676.340160] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d764e7bf-229d-4347-ad52-65a253f707ca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.348859] env[62619]: DEBUG oslo_vmware.api [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Waiting for the task: (returnval){ [ 1676.348859] env[62619]: value = "task-1777990" [ 1676.348859] env[62619]: _type = "Task" [ 1676.348859] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.367948] env[62619]: DEBUG oslo_vmware.api [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777990, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.408345] env[62619]: DEBUG oslo_vmware.api [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1777987, 'name': ReconfigVM_Task, 'duration_secs': 0.466064} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.408677] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Reconfigured VM instance instance-00000046 to attach disk [datastore1] cbff225f-2d11-4a43-a320-95dd3afb8e48/cbff225f-2d11-4a43-a320-95dd3afb8e48.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1676.408969] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating instance 'cbff225f-2d11-4a43-a320-95dd3afb8e48' progress to 50 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1676.482697] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1777988, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468252} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.483059] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8c296f2c-3e47-4431-b0c0-f7f1706c4a12/8c296f2c-3e47-4431-b0c0-f7f1706c4a12.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1676.483386] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1676.483692] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-313d4e32-944b-4fc8-a3d7-5e90ddfe48b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.492437] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1676.492437] env[62619]: value = "task-1777991" [ 1676.492437] env[62619]: _type = "Task" [ 1676.492437] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.500948] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1777991, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.598679] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777989, 'name': Rename_Task, 'duration_secs': 0.174769} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.598992] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1676.599623] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57f8f695-ca85-443d-9331-13146f540904 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.609212] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1676.609212] env[62619]: value = "task-1777992" [ 1676.609212] env[62619]: _type = "Task" [ 1676.609212] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.617508] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777992, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.641184] env[62619]: DEBUG nova.compute.manager [req-7b9f131d-b680-4785-8826-b0090dddcb31 req-7ac8de5b-6e11-45e5-87f8-a176bfb15bef service nova] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Received event network-changed-6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1676.641184] env[62619]: DEBUG nova.compute.manager [req-7b9f131d-b680-4785-8826-b0090dddcb31 req-7ac8de5b-6e11-45e5-87f8-a176bfb15bef service nova] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Refreshing instance network info cache due to event network-changed-6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1676.641184] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b9f131d-b680-4785-8826-b0090dddcb31 req-7ac8de5b-6e11-45e5-87f8-a176bfb15bef service nova] Acquiring lock "refresh_cache-11869077-b428-413f-9f8f-7eac08d2d9ec" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.641184] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b9f131d-b680-4785-8826-b0090dddcb31 req-7ac8de5b-6e11-45e5-87f8-a176bfb15bef service nova] Acquired lock "refresh_cache-11869077-b428-413f-9f8f-7eac08d2d9ec" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.641184] env[62619]: DEBUG nova.network.neutron [req-7b9f131d-b680-4785-8826-b0090dddcb31 req-7ac8de5b-6e11-45e5-87f8-a176bfb15bef service nova] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Refreshing network info cache for port 6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1676.656761] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.457s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.657261] env[62619]: DEBUG nova.compute.manager [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1676.660098] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.362s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.660296] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.669627] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.548s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.669848] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.671769] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.561s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.671988] env[62619]: DEBUG nova.objects.instance [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lazy-loading 'resources' on Instance uuid 1257a23e-3beb-4357-9322-4b84c87d0c35 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1676.711649] env[62619]: INFO nova.scheduler.client.report [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleted allocations for instance 4763e489-5aeb-4dc0-b327-b79a55afdfe3 [ 1676.719446] env[62619]: INFO nova.scheduler.client.report [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleted allocations for instance fdde42eb-766c-4549-aae5-f7b1a1097cc6 [ 1676.864335] env[62619]: DEBUG oslo_vmware.api [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777990, 'name': PowerOffVM_Task, 'duration_secs': 0.257673} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.864488] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1676.864734] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1676.865045] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b53701cd-6dad-4d19-a23d-a50807cb5c2d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.919170] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c385336d-c9dd-4ff2-b112-c5da54ee1a91 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.942138] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0ca5ea-ad24-41c3-add5-4f512f59be93 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.963883] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating instance 'cbff225f-2d11-4a43-a320-95dd3afb8e48' progress to 67 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1677.003035] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1777991, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070198} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.003035] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1677.003859] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8445a1d-4adb-44cc-bb55-601707a10f09 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.023890] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 8c296f2c-3e47-4431-b0c0-f7f1706c4a12/8c296f2c-3e47-4431-b0c0-f7f1706c4a12.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1677.024525] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84524e1d-10f0-4455-b6ca-daf8ada6a91a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.044873] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1677.044873] env[62619]: value = "task-1777994" [ 1677.044873] env[62619]: _type = "Task" [ 1677.044873] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.056207] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1777994, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.094518] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1677.094814] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1677.097467] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Deleting the datastore file [datastore1] 20d62152-3859-4023-a11d-b17c76e1090a {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1677.097467] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1765c73-f5ff-491c-83c2-fa638da99d56 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.104056] env[62619]: DEBUG oslo_vmware.api [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Waiting for the task: (returnval){ [ 1677.104056] env[62619]: value = "task-1777995" [ 1677.104056] env[62619]: _type = "Task" [ 1677.104056] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.116511] env[62619]: DEBUG oslo_vmware.api [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777995, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.122829] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777992, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.180353] env[62619]: DEBUG nova.compute.utils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1677.181727] env[62619]: DEBUG nova.compute.manager [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1677.181925] env[62619]: DEBUG nova.network.neutron [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1677.229886] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb0b0533-44d5-4bb6-94d2-230a1abbf404 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "fdde42eb-766c-4549-aae5-f7b1a1097cc6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.457s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.234315] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8673d682-a570-4a39-9cd4-c4da89ccbd21 tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "4763e489-5aeb-4dc0-b327-b79a55afdfe3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.940s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.315519] env[62619]: DEBUG nova.policy [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8d937f303584c3daea133a6283fd5a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23d77e73a09d492695fbfe6ac2c93371', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1677.557740] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1777994, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.562017] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7b4618-e816-488f-b364-5c9515e5baf3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.573676] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795142ea-142d-4201-96e7-5274385e05f7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.610923] env[62619]: DEBUG nova.network.neutron [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Port f289fb71-1285-4a29-9580-10815cd08cba binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1677.621800] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0669910a-e0d1-4914-9bd0-89063b874aa9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.629373] env[62619]: DEBUG oslo_vmware.api [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Task: {'id': task-1777995, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.194098} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.634878] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1677.635140] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1677.635349] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1677.635539] env[62619]: INFO nova.compute.manager [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1677.636249] env[62619]: DEBUG oslo.service.loopingcall [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1677.636249] env[62619]: DEBUG oslo_vmware.api [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1777992, 'name': PowerOnVM_Task, 'duration_secs': 0.878295} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.636531] env[62619]: DEBUG nova.compute.manager [-] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1677.636628] env[62619]: DEBUG nova.network.neutron [-] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1677.639260] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32cc2c0-5ab2-45b6-9582-158b8139d4d0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.643489] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1677.643681] env[62619]: INFO nova.compute.manager [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Took 8.86 seconds to spawn the instance on the hypervisor. [ 1677.643829] env[62619]: DEBUG nova.compute.manager [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1677.644919] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe9369a-77f7-40bf-8d14-87e7aa92935f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.662070] env[62619]: DEBUG nova.compute.provider_tree [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1677.686150] env[62619]: DEBUG nova.compute.manager [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1677.819498] env[62619]: DEBUG nova.network.neutron [req-7b9f131d-b680-4785-8826-b0090dddcb31 req-7ac8de5b-6e11-45e5-87f8-a176bfb15bef service nova] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Updated VIF entry in instance network info cache for port 6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1677.819868] env[62619]: DEBUG nova.network.neutron [req-7b9f131d-b680-4785-8826-b0090dddcb31 req-7ac8de5b-6e11-45e5-87f8-a176bfb15bef service nova] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Updating instance_info_cache with network_info: [{"id": "6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81", "address": "fa:16:3e:a3:be:5f", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ee41f5a-d2", "ovs_interfaceid": "6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.888567] env[62619]: DEBUG nova.network.neutron [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Successfully updated port: 24c2e90b-5ba7-4ae7-92a8-0666ce5bd797 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1678.035909] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "ac03bcf3-61df-4557-8018-0ad54ef30f17" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.036506] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "ac03bcf3-61df-4557-8018-0ad54ef30f17" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.036835] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock 
"ac03bcf3-61df-4557-8018-0ad54ef30f17-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.037169] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "ac03bcf3-61df-4557-8018-0ad54ef30f17-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.038783] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "ac03bcf3-61df-4557-8018-0ad54ef30f17-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.039976] env[62619]: INFO nova.compute.manager [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Terminating instance [ 1678.062631] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1777994, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.138663] env[62619]: DEBUG nova.network.neutron [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Successfully created port: db7fd312-3521-4a87-9acc-4d86d518b63c {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1678.171166] env[62619]: DEBUG nova.scheduler.client.report [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1678.176580] env[62619]: INFO nova.compute.manager [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Took 40.99 seconds to build instance. 
[ 1678.323270] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b9f131d-b680-4785-8826-b0090dddcb31 req-7ac8de5b-6e11-45e5-87f8-a176bfb15bef service nova] Releasing lock "refresh_cache-11869077-b428-413f-9f8f-7eac08d2d9ec" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1678.393452] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "refresh_cache-b3d9c418-f521-4770-a381-5238be6cc33c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.393643] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired lock "refresh_cache-b3d9c418-f521-4770-a381-5238be6cc33c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1678.393845] env[62619]: DEBUG nova.network.neutron [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1678.493291] env[62619]: DEBUG nova.network.neutron [-] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1678.550693] env[62619]: DEBUG nova.compute.manager [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1678.551184] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1678.556057] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c24342-5738-42b1-8f48-d2d152fe92b9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.566750] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1678.569570] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c319901-1417-444b-9735-1d0c0bbdca96 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.575379] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1777994, 'name': ReconfigVM_Task, 'duration_secs': 1.424383} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.575379] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 8c296f2c-3e47-4431-b0c0-f7f1706c4a12/8c296f2c-3e47-4431-b0c0-f7f1706c4a12.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1678.575379] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ae1dc86f-01b0-439b-83bc-aff379347cb8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.577379] env[62619]: DEBUG oslo_vmware.api [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1678.577379] env[62619]: value = "task-1777996" [ 1678.577379] env[62619]: _type = "Task" [ 1678.577379] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.581506] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1678.581506] env[62619]: value = "task-1777997" [ 1678.581506] env[62619]: _type = "Task" [ 1678.581506] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.593945] env[62619]: DEBUG oslo_vmware.api [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777996, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.601362] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1777997, 'name': Rename_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.615480] env[62619]: DEBUG nova.compute.manager [req-fda0d987-8630-426a-a001-e93ac35b3ace req-d2730171-8786-435a-88a9-28edd7bc150c service nova] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Received event network-vif-deleted-e6097edc-d3af-47c0-a0fc-fc2f2b6a9b20 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1678.655686] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "cbff225f-2d11-4a43-a320-95dd3afb8e48-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.655686] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "cbff225f-2d11-4a43-a320-95dd3afb8e48-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.004s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.655999] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "cbff225f-2d11-4a43-a320-95dd3afb8e48-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.677147] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.005s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.679779] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.763s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.680022] env[62619]: DEBUG nova.objects.instance [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a 
tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Lazy-loading 'resources' on Instance uuid d0258646-e687-4198-b7c8-7bd116e3bf18 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1678.681084] env[62619]: DEBUG oslo_concurrency.lockutils [None req-89f9276d-9811-499a-ad8f-c2e83026eddc tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "cb33580e-d70d-4557-98fe-e673d93f3307" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.506s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.701040] env[62619]: DEBUG nova.compute.manager [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1678.713444] env[62619]: INFO nova.scheduler.client.report [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleted allocations for instance 1257a23e-3beb-4357-9322-4b84c87d0c35 [ 1678.729942] env[62619]: DEBUG nova.virt.hardware [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1678.730209] env[62619]: DEBUG nova.virt.hardware [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1678.730362] env[62619]: DEBUG nova.virt.hardware [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1678.730535] env[62619]: DEBUG nova.virt.hardware [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1678.730675] env[62619]: DEBUG nova.virt.hardware [None req-eaee2504-0abf-46e6-a31e-41406f79e55b 
tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1678.730815] env[62619]: DEBUG nova.virt.hardware [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1678.731148] env[62619]: DEBUG nova.virt.hardware [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1678.731366] env[62619]: DEBUG nova.virt.hardware [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1678.731549] env[62619]: DEBUG nova.virt.hardware [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1678.731710] env[62619]: DEBUG nova.virt.hardware [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1678.731879] env[62619]: DEBUG nova.virt.hardware [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1678.733172] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1d5311-9591-4e07-a11f-93c9197eca84 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.742496] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0eeacdb-e260-4bcc-bbb8-44d8732c590a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.942468] env[62619]: DEBUG nova.network.neutron [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1678.997623] env[62619]: INFO nova.compute.manager [-] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Took 1.36 seconds to deallocate network for instance. 
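The "Acquiring lock" / "acquired" / "released" records above are emitted by oslo.concurrency's lockutils wrappers around named in-process locks such as "refresh_cache-<uuid>", "<uuid>-events" and "compute_resources". As a rough, minimal sketch of that pattern (the lock names, functions and printed messages below are illustrative placeholders, not Nova code):

from oslo_concurrency import lockutils

# Decorator form: serialize per-instance cache refreshes within this process.
# The uuid embedded in the lock name is a placeholder.
@lockutils.synchronized('refresh_cache-00000000-0000-0000-0000-000000000000')
def refresh_instance_cache():
    print('rebuilding instance network info cache')

# Context-manager form, as used for coarse-grained locks like "compute_resources".
def update_resource_usage():
    with lockutils.lock('compute_resources'):
        print('updating tracked resource usage')

if __name__ == '__main__':
    refresh_instance_cache()
    update_resource_usage()

Both forms default to process-local semaphores (external=False), which is what produces the per-lock "waited"/"held" timing lines in this trace.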
[ 1679.070750] env[62619]: DEBUG nova.compute.manager [req-6a4b4d88-5598-4a29-b3ce-01c8f9902181 req-5e0a1ea7-cf2b-43e2-b3a9-8ac3d1b5e8c3 service nova] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Received event network-vif-plugged-24c2e90b-5ba7-4ae7-92a8-0666ce5bd797 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1679.070973] env[62619]: DEBUG oslo_concurrency.lockutils [req-6a4b4d88-5598-4a29-b3ce-01c8f9902181 req-5e0a1ea7-cf2b-43e2-b3a9-8ac3d1b5e8c3 service nova] Acquiring lock "b3d9c418-f521-4770-a381-5238be6cc33c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.075045] env[62619]: DEBUG oslo_concurrency.lockutils [req-6a4b4d88-5598-4a29-b3ce-01c8f9902181 req-5e0a1ea7-cf2b-43e2-b3a9-8ac3d1b5e8c3 service nova] Lock "b3d9c418-f521-4770-a381-5238be6cc33c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.075045] env[62619]: DEBUG oslo_concurrency.lockutils [req-6a4b4d88-5598-4a29-b3ce-01c8f9902181 req-5e0a1ea7-cf2b-43e2-b3a9-8ac3d1b5e8c3 service nova] Lock "b3d9c418-f521-4770-a381-5238be6cc33c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.075045] env[62619]: DEBUG nova.compute.manager [req-6a4b4d88-5598-4a29-b3ce-01c8f9902181 req-5e0a1ea7-cf2b-43e2-b3a9-8ac3d1b5e8c3 service nova] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] No waiting events found dispatching network-vif-plugged-24c2e90b-5ba7-4ae7-92a8-0666ce5bd797 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1679.075045] env[62619]: WARNING nova.compute.manager [req-6a4b4d88-5598-4a29-b3ce-01c8f9902181 req-5e0a1ea7-cf2b-43e2-b3a9-8ac3d1b5e8c3 service nova] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Received unexpected event network-vif-plugged-24c2e90b-5ba7-4ae7-92a8-0666ce5bd797 for instance with vm_state building and task_state spawning. [ 1679.075045] env[62619]: DEBUG nova.compute.manager [req-6a4b4d88-5598-4a29-b3ce-01c8f9902181 req-5e0a1ea7-cf2b-43e2-b3a9-8ac3d1b5e8c3 service nova] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Received event network-changed-24c2e90b-5ba7-4ae7-92a8-0666ce5bd797 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1679.075045] env[62619]: DEBUG nova.compute.manager [req-6a4b4d88-5598-4a29-b3ce-01c8f9902181 req-5e0a1ea7-cf2b-43e2-b3a9-8ac3d1b5e8c3 service nova] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Refreshing instance network info cache due to event network-changed-24c2e90b-5ba7-4ae7-92a8-0666ce5bd797. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1679.075045] env[62619]: DEBUG oslo_concurrency.lockutils [req-6a4b4d88-5598-4a29-b3ce-01c8f9902181 req-5e0a1ea7-cf2b-43e2-b3a9-8ac3d1b5e8c3 service nova] Acquiring lock "refresh_cache-b3d9c418-f521-4770-a381-5238be6cc33c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.095517] env[62619]: DEBUG oslo_vmware.api [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1777996, 'name': PowerOffVM_Task, 'duration_secs': 0.370751} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.099507] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1679.099507] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1679.099507] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1777997, 'name': Rename_Task, 'duration_secs': 0.285378} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.099669] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0f03208-f4d2-4e62-947c-bb6809801906 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.101224] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1679.101466] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d4f101f7-54bf-4232-98c7-6463cd0dda08 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.110042] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1679.110042] env[62619]: value = "task-1777999" [ 1679.110042] env[62619]: _type = "Task" [ 1679.110042] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.123560] env[62619]: DEBUG nova.network.neutron [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Updating instance_info_cache with network_info: [{"id": "24c2e90b-5ba7-4ae7-92a8-0666ce5bd797", "address": "fa:16:3e:6d:78:60", "network": {"id": "7bc2fa7e-84fd-4916-9af1-aa767e1e38b8", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1746046422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0948c27a2b08413ba82d553452965c9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c2e90b-5b", "ovs_interfaceid": "24c2e90b-5ba7-4ae7-92a8-0666ce5bd797", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1679.195759] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1679.195759] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1679.195759] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleting the datastore file [datastore1] ac03bcf3-61df-4557-8018-0ad54ef30f17 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1679.195759] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88034835-7d58-4f91-a0b0-a53993541162 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.201438] env[62619]: DEBUG oslo_vmware.api [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for the task: (returnval){ [ 1679.201438] env[62619]: value = "task-1778000" [ 1679.201438] env[62619]: _type = "Task" [ 1679.201438] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.211921] env[62619]: DEBUG oslo_vmware.api [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1778000, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.224142] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eacb1676-9211-45e0-b248-b22791096e86 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "1257a23e-3beb-4357-9322-4b84c87d0c35" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.610s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.498835] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0dcec56-1736-4428-b67f-04107bd3f82e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.508669] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.512881] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c3d44b-02ee-489e-ae66-f2608d220e52 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.560857] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16325325-8f1f-44c2-813f-e44c3e73cdfe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.573026] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056e0cfc-2cad-4405-a8a2-c526aeb857c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.589449] env[62619]: DEBUG nova.compute.provider_tree [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1679.623427] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1777999, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.626745] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lock "refresh_cache-b3d9c418-f521-4770-a381-5238be6cc33c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.627114] env[62619]: DEBUG nova.compute.manager [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Instance network_info: |[{"id": "24c2e90b-5ba7-4ae7-92a8-0666ce5bd797", "address": "fa:16:3e:6d:78:60", "network": {"id": "7bc2fa7e-84fd-4916-9af1-aa767e1e38b8", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1746046422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0948c27a2b08413ba82d553452965c9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c2e90b-5b", "ovs_interfaceid": "24c2e90b-5ba7-4ae7-92a8-0666ce5bd797", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1679.627407] env[62619]: DEBUG oslo_concurrency.lockutils [req-6a4b4d88-5598-4a29-b3ce-01c8f9902181 req-5e0a1ea7-cf2b-43e2-b3a9-8ac3d1b5e8c3 service nova] Acquired lock "refresh_cache-b3d9c418-f521-4770-a381-5238be6cc33c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.627585] env[62619]: DEBUG nova.network.neutron [req-6a4b4d88-5598-4a29-b3ce-01c8f9902181 req-5e0a1ea7-cf2b-43e2-b3a9-8ac3d1b5e8c3 service nova] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Refreshing network info cache for port 24c2e90b-5ba7-4ae7-92a8-0666ce5bd797 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1679.628890] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:78:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a27fd90b-16a5-43af-bede-ae36762ece00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24c2e90b-5ba7-4ae7-92a8-0666ce5bd797', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1679.637526] env[62619]: DEBUG oslo.service.loopingcall [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to 
return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1679.640666] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1679.643903] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc4f02b4-bc59-4b32-b22f-9682dd60c590 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.664610] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1679.664610] env[62619]: value = "task-1778001" [ 1679.664610] env[62619]: _type = "Task" [ 1679.664610] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.674354] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778001, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.713106] env[62619]: DEBUG oslo_vmware.api [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Task: {'id': task-1778000, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18931} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.713106] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1679.713106] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1679.713106] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1679.713106] env[62619]: INFO nova.compute.manager [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1679.713106] env[62619]: DEBUG oslo.service.loopingcall [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1679.713560] env[62619]: DEBUG nova.compute.manager [-] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1679.713560] env[62619]: DEBUG nova.network.neutron [-] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1679.727950] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "refresh_cache-cbff225f-2d11-4a43-a320-95dd3afb8e48" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.727950] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "refresh_cache-cbff225f-2d11-4a43-a320-95dd3afb8e48" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.728103] env[62619]: DEBUG nova.network.neutron [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1679.988306] env[62619]: DEBUG nova.network.neutron [req-6a4b4d88-5598-4a29-b3ce-01c8f9902181 req-5e0a1ea7-cf2b-43e2-b3a9-8ac3d1b5e8c3 service nova] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Updated VIF entry in instance network info cache for port 24c2e90b-5ba7-4ae7-92a8-0666ce5bd797. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1679.988726] env[62619]: DEBUG nova.network.neutron [req-6a4b4d88-5598-4a29-b3ce-01c8f9902181 req-5e0a1ea7-cf2b-43e2-b3a9-8ac3d1b5e8c3 service nova] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Updating instance_info_cache with network_info: [{"id": "24c2e90b-5ba7-4ae7-92a8-0666ce5bd797", "address": "fa:16:3e:6d:78:60", "network": {"id": "7bc2fa7e-84fd-4916-9af1-aa767e1e38b8", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1746046422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0948c27a2b08413ba82d553452965c9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c2e90b-5b", "ovs_interfaceid": "24c2e90b-5ba7-4ae7-92a8-0666ce5bd797", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.019953] env[62619]: DEBUG oslo_concurrency.lockutils [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "cb33580e-d70d-4557-98fe-e673d93f3307" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.020658] env[62619]: DEBUG oslo_concurrency.lockutils [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "cb33580e-d70d-4557-98fe-e673d93f3307" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.020658] env[62619]: DEBUG oslo_concurrency.lockutils [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "cb33580e-d70d-4557-98fe-e673d93f3307-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.021431] env[62619]: DEBUG oslo_concurrency.lockutils [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "cb33580e-d70d-4557-98fe-e673d93f3307-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.021431] env[62619]: DEBUG 
oslo_concurrency.lockutils [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "cb33580e-d70d-4557-98fe-e673d93f3307-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.023225] env[62619]: INFO nova.compute.manager [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Terminating instance [ 1680.093568] env[62619]: DEBUG nova.scheduler.client.report [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1680.097890] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "e34a8173-952b-4ddc-90cf-3681387733fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.098153] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "e34a8173-952b-4ddc-90cf-3681387733fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.127982] env[62619]: DEBUG oslo_vmware.api [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1777999, 'name': PowerOnVM_Task, 'duration_secs': 0.705911} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.128293] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1680.128970] env[62619]: INFO nova.compute.manager [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Took 6.36 seconds to spawn the instance on the hypervisor. 
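The recurring Task: {'id': task-..., 'name': ...} "progress is N%" and "completed successfully" records, together with the "Waiting for the task: (returnval)" blocks, come from oslo.vmware's task polling. A rough sketch of that session/wait pattern follows; the vCenter endpoint, credentials and VM managed-object reference are placeholders, this is not the Nova driver code itself, and it needs a reachable vCenter to run:

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder endpoint and credentials.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'administrator', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Placeholder managed-object reference for an existing VM.
vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

# Start the asynchronous vSphere task, then block while the session polls it;
# the polling is what emits the "progress is N%" / "completed successfully"
# debug lines seen above.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)

wait_for_task blocks until the task reaches a terminal state and raises if the task ends in error, so callers only need to log durations and completions.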
[ 1680.129303] env[62619]: DEBUG nova.compute.manager [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1680.130083] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20c8238-622e-44f5-af07-02631f302522 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.175551] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778001, 'name': CreateVM_Task, 'duration_secs': 0.458851} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.175712] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1680.176456] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1680.176623] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1680.176948] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1680.178150] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0480cb88-a141-4c02-bc82-8bef42eeff28 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.183474] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1680.183474] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52400870-798f-3649-fae8-0b28094993e1" [ 1680.183474] env[62619]: _type = "Task" [ 1680.183474] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.197282] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52400870-798f-3649-fae8-0b28094993e1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.445433] env[62619]: DEBUG nova.network.neutron [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating instance_info_cache with network_info: [{"id": "f289fb71-1285-4a29-9580-10815cd08cba", "address": "fa:16:3e:bf:f4:72", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf289fb71-12", "ovs_interfaceid": "f289fb71-1285-4a29-9580-10815cd08cba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.494364] env[62619]: DEBUG oslo_concurrency.lockutils [req-6a4b4d88-5598-4a29-b3ce-01c8f9902181 req-5e0a1ea7-cf2b-43e2-b3a9-8ac3d1b5e8c3 service nova] Releasing lock "refresh_cache-b3d9c418-f521-4770-a381-5238be6cc33c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.519237] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "34180944-02f7-4115-8178-64f2f2591080" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.519237] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "34180944-02f7-4115-8178-64f2f2591080" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.528556] env[62619]: DEBUG nova.compute.manager [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1680.528556] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1680.528556] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00af065-1c8c-4ed7-8a1d-a75f3d8dfd3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.537079] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1680.537341] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1c73980-5a08-4852-b837-d5a6ef55beb3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.543618] env[62619]: DEBUG oslo_vmware.api [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1680.543618] env[62619]: value = "task-1778005" [ 1680.543618] env[62619]: _type = "Task" [ 1680.543618] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.553053] env[62619]: DEBUG oslo_vmware.api [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1778005, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.600486] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.921s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.602783] env[62619]: DEBUG nova.compute.manager [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1680.607752] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 8.607s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.645395] env[62619]: INFO nova.scheduler.client.report [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Deleted allocations for instance d0258646-e687-4198-b7c8-7bd116e3bf18 [ 1680.655366] env[62619]: DEBUG nova.network.neutron [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Successfully updated port: db7fd312-3521-4a87-9acc-4d86d518b63c {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1680.656372] env[62619]: DEBUG nova.network.neutron [-] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.659558] env[62619]: DEBUG nova.compute.manager [req-437df966-2401-4db0-b33e-182ed1ea57d9 req-af8b1434-fe64-4f45-92f1-5efa7f4fcbdc service nova] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Received event network-vif-deleted-b6faf342-2332-4eee-bdde-dafce4f0a856 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1680.659558] env[62619]: INFO nova.compute.manager [req-437df966-2401-4db0-b33e-182ed1ea57d9 req-af8b1434-fe64-4f45-92f1-5efa7f4fcbdc service nova] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Neutron deleted interface b6faf342-2332-4eee-bdde-dafce4f0a856; detaching it from the instance and deleting it from the info cache [ 1680.659558] env[62619]: DEBUG nova.network.neutron [req-437df966-2401-4db0-b33e-182ed1ea57d9 req-af8b1434-fe64-4f45-92f1-5efa7f4fcbdc service nova] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.662328] env[62619]: INFO nova.compute.manager [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Took 36.77 seconds to build instance. [ 1680.694016] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52400870-798f-3649-fae8-0b28094993e1, 'name': SearchDatastore_Task, 'duration_secs': 0.013547} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.694349] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.694545] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1680.694774] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1680.694913] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1680.695099] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1680.695400] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4feaca41-1815-47f0-a43d-78f867b692b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.704526] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1680.704722] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1680.705469] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6c6583c-b60e-4be8-a9ee-e6cc32a29f04 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.711181] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1680.711181] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521f3615-6524-552b-ffde-2295213a1368" [ 1680.711181] env[62619]: _type = "Task" [ 1680.711181] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.723141] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521f3615-6524-552b-ffde-2295213a1368, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.947514] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "refresh_cache-cbff225f-2d11-4a43-a320-95dd3afb8e48" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.020920] env[62619]: DEBUG nova.compute.manager [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1681.054384] env[62619]: DEBUG oslo_vmware.api [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1778005, 'name': PowerOffVM_Task, 'duration_secs': 0.172455} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.054738] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1681.054946] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1681.055295] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-454ece34-bcf5-4d17-aabf-cb3f33c086ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.094635] env[62619]: DEBUG nova.compute.manager [req-98e4babe-6392-492d-8c15-00c7c7680ef4 req-82f7d3b5-1779-4c4c-b1b5-8071b2539b57 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Received event network-vif-plugged-db7fd312-3521-4a87-9acc-4d86d518b63c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1681.094853] env[62619]: DEBUG oslo_concurrency.lockutils [req-98e4babe-6392-492d-8c15-00c7c7680ef4 req-82f7d3b5-1779-4c4c-b1b5-8071b2539b57 service nova] Acquiring lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.095069] env[62619]: DEBUG oslo_concurrency.lockutils [req-98e4babe-6392-492d-8c15-00c7c7680ef4 req-82f7d3b5-1779-4c4c-b1b5-8071b2539b57 service nova] Lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.095233] env[62619]: DEBUG oslo_concurrency.lockutils [req-98e4babe-6392-492d-8c15-00c7c7680ef4 req-82f7d3b5-1779-4c4c-b1b5-8071b2539b57 service nova] Lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.095431] env[62619]: DEBUG nova.compute.manager [req-98e4babe-6392-492d-8c15-00c7c7680ef4 req-82f7d3b5-1779-4c4c-b1b5-8071b2539b57 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] No waiting events found dispatching network-vif-plugged-db7fd312-3521-4a87-9acc-4d86d518b63c {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1681.095660] env[62619]: WARNING nova.compute.manager [req-98e4babe-6392-492d-8c15-00c7c7680ef4 req-82f7d3b5-1779-4c4c-b1b5-8071b2539b57 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Received unexpected event network-vif-plugged-db7fd312-3521-4a87-9acc-4d86d518b63c for instance with vm_state building and task_state spawning. 
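Datastore locations in these records, such as [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk, follow the vSphere "[datastore] relative/path" convention, which oslo.vmware models as a DatastorePath object. A small illustrative sketch, reusing the image id from the trace above:

from oslo_vmware.objects import datastore as ds_obj

image_id = '27a858d5-7985-4b17-8b01-50adcd8f566c'
vmdk = ds_obj.DatastorePath(
    'datastore1', 'devstack-image-cache_base', image_id, image_id + '.vmdk')

print(str(vmdk))       # [datastore1] devstack-image-cache_base/<id>/<id>.vmdk
print(vmdk.datastore)  # datastore1
print(vmdk.rel_path)   # devstack-image-cache_base/<id>/<id>.vmdk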
[ 1681.096326] env[62619]: DEBUG nova.compute.manager [req-98e4babe-6392-492d-8c15-00c7c7680ef4 req-82f7d3b5-1779-4c4c-b1b5-8071b2539b57 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Received event network-changed-db7fd312-3521-4a87-9acc-4d86d518b63c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1681.096326] env[62619]: DEBUG nova.compute.manager [req-98e4babe-6392-492d-8c15-00c7c7680ef4 req-82f7d3b5-1779-4c4c-b1b5-8071b2539b57 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Refreshing instance network info cache due to event network-changed-db7fd312-3521-4a87-9acc-4d86d518b63c. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1681.096326] env[62619]: DEBUG oslo_concurrency.lockutils [req-98e4babe-6392-492d-8c15-00c7c7680ef4 req-82f7d3b5-1779-4c4c-b1b5-8071b2539b57 service nova] Acquiring lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1681.096709] env[62619]: DEBUG oslo_concurrency.lockutils [req-98e4babe-6392-492d-8c15-00c7c7680ef4 req-82f7d3b5-1779-4c4c-b1b5-8071b2539b57 service nova] Acquired lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1681.096709] env[62619]: DEBUG nova.network.neutron [req-98e4babe-6392-492d-8c15-00c7c7680ef4 req-82f7d3b5-1779-4c4c-b1b5-8071b2539b57 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Refreshing network info cache for port db7fd312-3521-4a87-9acc-4d86d518b63c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1681.132678] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.142353] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1681.142353] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1681.142556] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Deleting the datastore file [datastore1] cb33580e-d70d-4557-98fe-e673d93f3307 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1681.142799] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f810ac8b-6da0-4f85-94d3-0a55d7f33204 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.152499] env[62619]: DEBUG oslo_vmware.api [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for the task: (returnval){ [ 1681.152499] env[62619]: value = "task-1778007" [ 1681.152499] env[62619]: _type = "Task" [ 1681.152499] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.159291] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6a7231ed-87e9-43d8-9d50-3d25b03e9f3a tempest-InstanceActionsNegativeTestJSON-75633597 tempest-InstanceActionsNegativeTestJSON-75633597-project-member] Lock "d0258646-e687-4198-b7c8-7bd116e3bf18" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.635s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.167175] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1681.167537] env[62619]: INFO nova.compute.manager [-] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Took 1.45 seconds to deallocate network for instance. [ 1681.168021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be94ce9f-dc60-48c4-8ba5-f497d59affa2 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Lock "8c296f2c-3e47-4431-b0c0-f7f1706c4a12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.280s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.168452] env[62619]: DEBUG oslo_vmware.api [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1778007, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.175308] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-535f722c-5430-4fa2-ba87-4b6aed07c8a1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.188806] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c360fdf-8e0e-4c7a-ae8d-cb95df905a6e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.230098] env[62619]: DEBUG nova.compute.manager [req-437df966-2401-4db0-b33e-182ed1ea57d9 req-af8b1434-fe64-4f45-92f1-5efa7f4fcbdc service nova] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Detach interface failed, port_id=b6faf342-2332-4eee-bdde-dafce4f0a856, reason: Instance ac03bcf3-61df-4557-8018-0ad54ef30f17 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1681.239783] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521f3615-6524-552b-ffde-2295213a1368, 'name': SearchDatastore_Task, 'duration_secs': 0.008353} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.240615] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff476c0b-4e1f-4b3b-ad95-993eeeb8f993 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.245593] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1681.245593] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52933776-9386-759a-d5b2-92bf92b7bf70" [ 1681.245593] env[62619]: _type = "Task" [ 1681.245593] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.257618] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52933776-9386-759a-d5b2-92bf92b7bf70, 'name': SearchDatastore_Task, 'duration_secs': 0.009097} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.258010] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.258293] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] b3d9c418-f521-4770-a381-5238be6cc33c/b3d9c418-f521-4770-a381-5238be6cc33c.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1681.258570] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29819b40-ef0f-4fd0-bc2c-ea2547c96480 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.270813] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1681.270813] env[62619]: value = "task-1778008" [ 1681.270813] env[62619]: _type = "Task" [ 1681.270813] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.282801] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778008, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.443495] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198e04b8-2499-4215-ae52-af1d504474dd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.451952] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ffe1ca0-0ce7-4259-8f65-e9d83c067766 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.492415] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771e2bcb-32f1-4521-a1d0-47824f63e91d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.495884] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501a8aea-ea90-4812-ae43-c03b24b22a8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.517447] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178f8fb2-ee1a-4504-9bc9-7a70f5658103 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.521434] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f9d072-9453-4285-b0a6-6eaec3d60eeb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.533655] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating instance 'cbff225f-2d11-4a43-a320-95dd3afb8e48' progress to 83 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1681.549072] env[62619]: DEBUG nova.compute.provider_tree [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1681.551974] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.657766] env[62619]: DEBUG nova.network.neutron [req-98e4babe-6392-492d-8c15-00c7c7680ef4 req-82f7d3b5-1779-4c4c-b1b5-8071b2539b57 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1681.666575] env[62619]: DEBUG oslo_vmware.api [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Task: {'id': task-1778007, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139958} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.666871] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1681.667209] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1681.667284] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1681.667406] env[62619]: INFO nova.compute.manager [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1681.667785] env[62619]: DEBUG oslo.service.loopingcall [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1681.667876] env[62619]: DEBUG nova.compute.manager [-] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1681.667972] env[62619]: DEBUG nova.network.neutron [-] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1681.679906] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.780921] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778008, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446667} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.781206] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] b3d9c418-f521-4770-a381-5238be6cc33c/b3d9c418-f521-4770-a381-5238be6cc33c.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1681.781416] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1681.781665] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8df14389-0b29-48d1-b14d-7ee602c24686 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.788499] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1681.788499] env[62619]: value = "task-1778009" [ 1681.788499] env[62619]: _type = "Task" [ 1681.788499] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.796098] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778009, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.833952] env[62619]: INFO nova.compute.manager [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Rebuilding instance [ 1681.878672] env[62619]: DEBUG nova.network.neutron [req-98e4babe-6392-492d-8c15-00c7c7680ef4 req-82f7d3b5-1779-4c4c-b1b5-8071b2539b57 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1681.881790] env[62619]: DEBUG nova.compute.manager [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1681.882754] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0bbecc-b8b5-41a6-a789-2564f6e6c4be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.042165] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1682.042440] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc208eb0-619b-44b1-91d9-40453b3d9d3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.060327] env[62619]: DEBUG oslo_vmware.api [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1682.060327] env[62619]: value = "task-1778010" [ 1682.060327] env[62619]: _type = "Task" [ 1682.060327] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.071063] env[62619]: DEBUG oslo_vmware.api [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778010, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.090923] env[62619]: ERROR nova.scheduler.client.report [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [req-66334fa7-63c0-4dec-a6f4-19873ac45d51] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-66334fa7-63c0-4dec-a6f4-19873ac45d51"}]} [ 1682.114112] env[62619]: DEBUG nova.scheduler.client.report [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1682.131516] env[62619]: DEBUG nova.scheduler.client.report [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1682.131966] env[62619]: DEBUG nova.compute.provider_tree [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1682.151925] env[62619]: DEBUG nova.scheduler.client.report [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1682.175636] env[62619]: DEBUG nova.scheduler.client.report [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1682.299376] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778009, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06727} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.299565] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1682.300397] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de9006e-99d9-4ab7-b4c3-6801d96f6d80 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.324306] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] b3d9c418-f521-4770-a381-5238be6cc33c/b3d9c418-f521-4770-a381-5238be6cc33c.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1682.327923] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3471ab2a-11e4-4e7f-9fff-22a78bf060ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.347895] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1682.347895] env[62619]: value = "task-1778011" [ 1682.347895] env[62619]: _type = "Task" [ 1682.347895] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.357079] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778011, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.383176] env[62619]: DEBUG oslo_concurrency.lockutils [req-98e4babe-6392-492d-8c15-00c7c7680ef4 req-82f7d3b5-1779-4c4c-b1b5-8071b2539b57 service nova] Releasing lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1682.383504] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1682.383816] env[62619]: DEBUG nova.network.neutron [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1682.448324] env[62619]: DEBUG nova.network.neutron [-] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1682.550114] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e705680-541c-45a6-8852-ac4d85b5c98b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.567942] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1537d6c8-70e1-484a-be4c-1d34983a5ba3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.576731] env[62619]: DEBUG oslo_vmware.api [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778010, 'name': PowerOnVM_Task, 'duration_secs': 0.453208} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.577489] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1682.577682] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6af70d22-d7ef-4f2b-a98c-0c0a27e30d93 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating instance 'cbff225f-2d11-4a43-a320-95dd3afb8e48' progress to 100 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1682.613746] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc77008-e8b2-4222-baf8-e04d67222f72 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.631022] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ad801f-8e00-42e8-8c58-7d9de6e23263 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.648366] env[62619]: DEBUG nova.compute.provider_tree [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1682.837979] env[62619]: DEBUG nova.compute.manager [req-f6841e12-c7f4-4be0-a3ec-6e11a08f2922 req-6205581f-bef7-444e-88c5-efdfa8377764 service nova] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Received event network-vif-deleted-a779a2e4-0edc-4f00-ad83-a86f4cc102c8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1682.860662] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778011, 'name': ReconfigVM_Task, 'duration_secs': 0.357426} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.860956] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Reconfigured VM instance instance-0000004e to attach disk [datastore1] b3d9c418-f521-4770-a381-5238be6cc33c/b3d9c418-f521-4770-a381-5238be6cc33c.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1682.861591] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-76143834-e37e-4c00-bf45-bac50574ea38 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.868194] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1682.868194] env[62619]: value = "task-1778013" [ 1682.868194] env[62619]: _type = "Task" [ 1682.868194] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.879887] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778013, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.896144] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1682.896458] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0715d97-d3a3-4efd-bb9c-51793c4bbef6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.907393] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1682.907393] env[62619]: value = "task-1778014" [ 1682.907393] env[62619]: _type = "Task" [ 1682.907393] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.915532] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778014, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.921135] env[62619]: DEBUG nova.network.neutron [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1682.958375] env[62619]: INFO nova.compute.manager [-] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Took 1.29 seconds to deallocate network for instance. [ 1683.087016] env[62619]: DEBUG nova.network.neutron [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Updating instance_info_cache with network_info: [{"id": "db7fd312-3521-4a87-9acc-4d86d518b63c", "address": "fa:16:3e:14:06:4e", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb7fd312-35", "ovs_interfaceid": "db7fd312-3521-4a87-9acc-4d86d518b63c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.153758] env[62619]: DEBUG nova.scheduler.client.report [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1683.244167] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.244449] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.244637] env[62619]: INFO nova.compute.manager [None 
req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Shelving [ 1683.378514] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778013, 'name': Rename_Task, 'duration_secs': 0.158361} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.378781] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1683.379037] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7bb64b5f-a596-40cd-9891-2d789e03de0b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.385540] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1683.385540] env[62619]: value = "task-1778015" [ 1683.385540] env[62619]: _type = "Task" [ 1683.385540] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.393393] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778015, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.418636] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778014, 'name': PowerOffVM_Task, 'duration_secs': 0.144386} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.418906] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1683.419138] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1683.419973] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e647533-c094-4078-bdcc-1a206301c585 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.427593] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1683.432272] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b198ecdd-57bd-489d-9a26-2d879dc6d432 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.457898] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1683.457898] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1683.458099] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Deleting the datastore file [datastore1] 8c296f2c-3e47-4431-b0c0-f7f1706c4a12 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1683.461178] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-090bcc0c-4cf2-4691-b3b8-0f11daba7ed4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.466517] env[62619]: DEBUG oslo_concurrency.lockutils [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.466956] env[62619]: DEBUG oslo_vmware.api [None 
req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1683.466956] env[62619]: value = "task-1778017" [ 1683.466956] env[62619]: _type = "Task" [ 1683.466956] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.480061] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778017, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.594873] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1683.597018] env[62619]: DEBUG nova.compute.manager [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Instance network_info: |[{"id": "db7fd312-3521-4a87-9acc-4d86d518b63c", "address": "fa:16:3e:14:06:4e", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb7fd312-35", "ovs_interfaceid": "db7fd312-3521-4a87-9acc-4d86d518b63c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1683.597018] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:06:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9aa05ef8-c7bb-4af5-983f-bfa0f3f88223', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'db7fd312-3521-4a87-9acc-4d86d518b63c', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1683.606216] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 
tempest-AttachInterfacesTestJSON-654412364-project-member] Creating folder: Project (23d77e73a09d492695fbfe6ac2c93371). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1683.606216] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-79177358-c80a-455a-b1c4-a639a518ed3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.617964] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Created folder: Project (23d77e73a09d492695fbfe6ac2c93371) in parent group-v368875. [ 1683.618284] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Creating folder: Instances. Parent ref: group-v369089. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1683.618603] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03c5485d-59ac-4e03-a257-f75e7f70f7cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.628213] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Created folder: Instances in parent group-v369089. [ 1683.628348] env[62619]: DEBUG oslo.service.loopingcall [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.628504] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1683.628721] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-77f7e272-406b-44a6-b676-87fef9a5a152 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.649573] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1683.649573] env[62619]: value = "task-1778020" [ 1683.649573] env[62619]: _type = "Task" [ 1683.649573] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.658029] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778020, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.899451] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778015, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.977700] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778017, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.463638} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.978047] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1683.978254] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1683.978436] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1684.160660] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778020, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.165587] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.559s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.165997] env[62619]: DEBUG nova.compute.manager [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=62619) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5283}} [ 1684.168898] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.361s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.171042] env[62619]: DEBUG nova.objects.instance [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lazy-loading 'resources' on Instance uuid aa4906f1-e801-4df0-819e-8c5fb5930fb5 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1684.260019] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1684.260352] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42e249f7-8277-449d-b3e0-eb602dd25fac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.268640] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1684.268640] env[62619]: value = "task-1778021" [ 1684.268640] env[62619]: _type = "Task" [ 1684.268640] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.276704] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778021, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.397774] env[62619]: DEBUG oslo_vmware.api [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778015, 'name': PowerOnVM_Task, 'duration_secs': 0.622047} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.397774] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1684.397774] env[62619]: INFO nova.compute.manager [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Took 8.17 seconds to spawn the instance on the hypervisor. 
[ 1684.397774] env[62619]: DEBUG nova.compute.manager [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1684.398354] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4589a4f5-7626-4f6e-ab13-fdc79aa76903 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.531508] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "cbff225f-2d11-4a43-a320-95dd3afb8e48" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.531776] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "cbff225f-2d11-4a43-a320-95dd3afb8e48" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.531957] env[62619]: DEBUG nova.compute.manager [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Going to confirm migration 4 {{(pid=62619) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5157}} [ 1684.659916] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778020, 'name': CreateVM_Task, 'duration_secs': 0.773919} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.660334] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1684.660791] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.660950] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.661321] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1684.661550] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bbff413-0fb3-4af3-a4e5-c87d6742b329 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.666335] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1684.666335] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522fcca5-1582-67dd-05e7-516b81e7d4e5" [ 1684.666335] env[62619]: _type = "Task" [ 1684.666335] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.678258] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522fcca5-1582-67dd-05e7-516b81e7d4e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.732531] env[62619]: INFO nova.scheduler.client.report [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Deleted allocation for migration 7a58e51e-e1cb-4fe5-a12d-73e2a613ed67 [ 1684.787859] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778021, 'name': PowerOffVM_Task, 'duration_secs': 0.388315} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.788422] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1684.789299] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-876e4250-685c-4fb9-9356-b2ba33c98c01 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.817983] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e38e8d9-23d7-4394-b416-c2d5ea578379 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.916016] env[62619]: INFO nova.compute.manager [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Took 39.38 seconds to build instance. [ 1685.009116] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc791d6-3f5d-449d-ad30-241755f7c899 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.022652] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd97d2d9-1fc1-444d-bdb1-6e50a46ba0bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.033675] env[62619]: DEBUG nova.virt.hardware [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1685.033924] env[62619]: DEBUG nova.virt.hardware [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1685.034098] env[62619]: DEBUG nova.virt.hardware [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1685.034288] env[62619]: DEBUG nova.virt.hardware [None 
req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1685.034432] env[62619]: DEBUG nova.virt.hardware [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1685.034583] env[62619]: DEBUG nova.virt.hardware [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1685.034786] env[62619]: DEBUG nova.virt.hardware [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1685.035054] env[62619]: DEBUG nova.virt.hardware [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1685.035122] env[62619]: DEBUG nova.virt.hardware [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1685.035259] env[62619]: DEBUG nova.virt.hardware [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1685.035454] env[62619]: DEBUG nova.virt.hardware [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1685.038489] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77305737-5bd5-421e-9baf-bd429d9a1905 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.075062] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d3453e-b5ae-4a97-af0a-87e361b99e8f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.080309] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64263466-2392-4caa-a068-9c5da73384c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.095388] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-548af5ee-e81a-43f8-b5dd-a1a8c6627944 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.099572] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1685.105221] env[62619]: DEBUG oslo.service.loopingcall [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1685.105429] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1685.106054] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec782a6d-6558-4799-9d64-27332cc1a018 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.125733] env[62619]: DEBUG nova.compute.provider_tree [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1685.127946] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "refresh_cache-cbff225f-2d11-4a43-a320-95dd3afb8e48" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.128118] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "refresh_cache-cbff225f-2d11-4a43-a320-95dd3afb8e48" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.128284] env[62619]: DEBUG nova.network.neutron [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1685.128470] env[62619]: DEBUG nova.objects.instance [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lazy-loading 'info_cache' on Instance uuid cbff225f-2d11-4a43-a320-95dd3afb8e48 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1685.135762] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1685.135762] env[62619]: value = "task-1778023" [ 1685.135762] env[62619]: _type = "Task" [ 1685.135762] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.144316] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778023, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.177174] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522fcca5-1582-67dd-05e7-516b81e7d4e5, 'name': SearchDatastore_Task, 'duration_secs': 0.010824} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.177821] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.177821] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1685.177994] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.178066] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.178230] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1685.178489] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-871c9b01-5972-4105-ada0-8ec9f9bc5b1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.188912] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1685.189060] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None 
req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1685.189799] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5150e018-69c8-4741-86b9-8a8c728d6e50 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.194846] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1685.194846] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521e3e83-d074-c55a-e404-22a6434a3fcd" [ 1685.194846] env[62619]: _type = "Task" [ 1685.194846] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.202721] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521e3e83-d074-c55a-e404-22a6434a3fcd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.240577] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca71ba57-1f94-4a3d-a25f-edfffef74280 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 16.795s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.283572] env[62619]: DEBUG nova.objects.instance [None req-23b74cce-f20e-49a7-a050-e63b4d4fadba tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'flavor' on Instance uuid da806d3f-79f0-4188-a2d8-0beeb9dfec1a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1685.332651] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1685.333039] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-26e62e8b-8a21-40b9-8779-199e32a29ace {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.341365] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1685.341365] env[62619]: value = "task-1778024" [ 1685.341365] env[62619]: _type = "Task" [ 1685.341365] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.350616] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778024, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.420768] env[62619]: DEBUG oslo_concurrency.lockutils [None req-959c6aea-63b0-4a21-9214-6bd022d1b8c7 tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "b3d9c418-f521-4770-a381-5238be6cc33c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 40.895s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.632988] env[62619]: DEBUG nova.scheduler.client.report [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1685.647805] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778023, 'name': CreateVM_Task, 'duration_secs': 0.3922} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.648048] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1685.648477] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.650392] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.650392] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1685.650392] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-389ba1fd-cdc4-4135-84b4-48884716124b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.654874] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1685.654874] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526473d6-6227-f78b-3379-82cc43a9a5fc" [ 1685.654874] env[62619]: _type = "Task" [ 1685.654874] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.664821] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526473d6-6227-f78b-3379-82cc43a9a5fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.706791] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521e3e83-d074-c55a-e404-22a6434a3fcd, 'name': SearchDatastore_Task, 'duration_secs': 0.025944} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.706791] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ec08af9-8b0a-401e-abcf-4e36c4f9cbfd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.712284] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1685.712284] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524c5a51-c633-43f0-4be8-c8ef5f3e3b10" [ 1685.712284] env[62619]: _type = "Task" [ 1685.712284] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.720880] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524c5a51-c633-43f0-4be8-c8ef5f3e3b10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.791500] env[62619]: DEBUG oslo_concurrency.lockutils [None req-23b74cce-f20e-49a7-a050-e63b4d4fadba tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.791500] env[62619]: DEBUG oslo_concurrency.lockutils [None req-23b74cce-f20e-49a7-a050-e63b4d4fadba tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.791775] env[62619]: DEBUG nova.network.neutron [None req-23b74cce-f20e-49a7-a050-e63b4d4fadba tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1685.791775] env[62619]: DEBUG nova.objects.instance [None req-23b74cce-f20e-49a7-a050-e63b4d4fadba tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'info_cache' on Instance uuid da806d3f-79f0-4188-a2d8-0beeb9dfec1a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1685.852189] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778024, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.140184] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.971s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.141936] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.633s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.142185] env[62619]: DEBUG nova.objects.instance [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Lazy-loading 'resources' on Instance uuid 20d62152-3859-4023-a11d-b17c76e1090a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1686.167493] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526473d6-6227-f78b-3379-82cc43a9a5fc, 'name': SearchDatastore_Task, 'duration_secs': 0.021935} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.169082] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.169082] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1686.169082] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.176869] env[62619]: INFO nova.scheduler.client.report [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Deleted allocations for instance aa4906f1-e801-4df0-819e-8c5fb5930fb5 [ 1686.227775] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': 
session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524c5a51-c633-43f0-4be8-c8ef5f3e3b10, 'name': SearchDatastore_Task, 'duration_secs': 0.019965} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.228030] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.228282] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4983b333-debb-4a2b-b28d-b321f0d8d7d7/4983b333-debb-4a2b-b28d-b321f0d8d7d7.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1686.228550] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.228722] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1686.228928] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df6e0433-226a-48d9-83a6-3e013521a8af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.230828] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92549704-c13c-46e7-8140-389f73cef785 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.238128] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1686.238128] env[62619]: value = "task-1778025" [ 1686.238128] env[62619]: _type = "Task" [ 1686.238128] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.242821] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1686.243058] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1686.244731] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04d6b3c1-1149-4fb0-b002-271baef4e3d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.251439] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778025, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.254953] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1686.254953] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522e2d6a-cd59-d0d9-df30-3085af65bc48" [ 1686.254953] env[62619]: _type = "Task" [ 1686.254953] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.265605] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522e2d6a-cd59-d0d9-df30-3085af65bc48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.295995] env[62619]: DEBUG nova.objects.base [None req-23b74cce-f20e-49a7-a050-e63b4d4fadba tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1686.356521] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778024, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.522290] env[62619]: DEBUG nova.network.neutron [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating instance_info_cache with network_info: [{"id": "f289fb71-1285-4a29-9580-10815cd08cba", "address": "fa:16:3e:bf:f4:72", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf289fb71-12", "ovs_interfaceid": "f289fb71-1285-4a29-9580-10815cd08cba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1686.687166] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a16550a-d160-4265-8578-ae3f18d363a4 tempest-MigrationsAdminTest-292515410 tempest-MigrationsAdminTest-292515410-project-member] Lock "aa4906f1-e801-4df0-819e-8c5fb5930fb5" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 17.420s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.748347] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778025, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476403} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.748616] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4983b333-debb-4a2b-b28d-b321f0d8d7d7/4983b333-debb-4a2b-b28d-b321f0d8d7d7.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1686.748833] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1686.749096] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-79cdf529-6685-4b4d-a9c0-b7cbad5137de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.755454] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1686.755454] env[62619]: value = "task-1778026" [ 1686.755454] env[62619]: _type = "Task" [ 1686.755454] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.782434] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522e2d6a-cd59-d0d9-df30-3085af65bc48, 'name': SearchDatastore_Task, 'duration_secs': 0.009798} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.783535] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778026, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.787076] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-274bfc79-9681-4865-aedc-a4ad689fed09 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.792525] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1686.792525] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52371149-4413-ff55-b828-851d5aec4484" [ 1686.792525] env[62619]: _type = "Task" [ 1686.792525] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.802726] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52371149-4413-ff55-b828-851d5aec4484, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.856121] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778024, 'name': CreateSnapshot_Task, 'duration_secs': 1.123289} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.856436] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1686.857272] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17d6252-426e-4578-8bfd-e4091d74b72d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.000193] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886cfc8b-d1f0-4f81-bbb5-0db8ffb716b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.009261] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4687fb4a-9114-4c63-88a1-cd48ecdf006a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.019560] env[62619]: DEBUG nova.compute.manager [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1687.050537] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e1918c-2c36-4d87-91d0-a5a521b2172f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.053651] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "refresh_cache-cbff225f-2d11-4a43-a320-95dd3afb8e48" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.053917] env[62619]: DEBUG nova.objects.instance [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lazy-loading 'migration_context' on Instance uuid cbff225f-2d11-4a43-a320-95dd3afb8e48 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1687.060558] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f78a0c-f784-4df7-8270-a3ebf7e38225 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.075776] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3212ee86-e0cb-49a8-8be0-5f04772424b9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.091998] env[62619]: DEBUG nova.compute.provider_tree [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1687.204644] env[62619]: DEBUG nova.network.neutron [None req-23b74cce-f20e-49a7-a050-e63b4d4fadba tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance_info_cache with network_info: [{"id": "7df13a55-7d53-41b9-9489-591516bda30c", "address": "fa:16:3e:aa:15:ba", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df13a55-7d", "ovs_interfaceid": "7df13a55-7d53-41b9-9489-591516bda30c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.271607] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778026, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.1051} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.272097] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1687.273125] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4299c0da-0dd4-44f7-97f3-07186a86fc87 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.296498] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 4983b333-debb-4a2b-b28d-b321f0d8d7d7/4983b333-debb-4a2b-b28d-b321f0d8d7d7.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1687.296860] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff529a17-1b3f-4555-8b92-ff73b7e1b111 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.320472] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52371149-4413-ff55-b828-851d5aec4484, 'name': SearchDatastore_Task, 'duration_secs': 0.020134} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.321756] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.322014] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8c296f2c-3e47-4431-b0c0-f7f1706c4a12/8c296f2c-3e47-4431-b0c0-f7f1706c4a12.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1687.322335] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1687.322335] env[62619]: value = "task-1778028" [ 1687.322335] env[62619]: _type = "Task" [ 1687.322335] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.322511] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-68649a83-3295-43a8-947c-4ff8bb4b1f8b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.332612] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778028, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.333095] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1687.333095] env[62619]: value = "task-1778029" [ 1687.333095] env[62619]: _type = "Task" [ 1687.333095] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.341677] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778029, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.379762] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1687.380212] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c7ef2483-31e7-498e-913a-edc448008c0d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.388686] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1687.388686] env[62619]: value = "task-1778030" [ 1687.388686] env[62619]: _type = "Task" [ 1687.388686] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.397713] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778030, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.561665] env[62619]: DEBUG nova.objects.base [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1687.562846] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ab1c3a-6921-48b9-b3fa-55a1ddb31201 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.584775] env[62619]: INFO nova.compute.manager [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] instance snapshotting [ 1687.588339] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cd947f8-901a-4087-888c-52131ea45192 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.590220] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac9f1cc-b795-4101-b764-ecd881b79a2b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.596933] env[62619]: DEBUG nova.scheduler.client.report [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1687.601991] env[62619]: DEBUG oslo_vmware.api [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1687.601991] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d2589b-ac6e-3967-1eb0-5ab3f10a5b87" [ 1687.601991] env[62619]: _type = "Task" [ 1687.601991] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.633159] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df46e3e-420b-4d8d-ac50-380cdc1c3724 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.653013] env[62619]: DEBUG oslo_vmware.api [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d2589b-ac6e-3967-1eb0-5ab3f10a5b87, 'name': SearchDatastore_Task, 'duration_secs': 0.0183} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.653779] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.706308] env[62619]: DEBUG oslo_concurrency.lockutils [None req-23b74cce-f20e-49a7-a050-e63b4d4fadba tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "refresh_cache-da806d3f-79f0-4188-a2d8-0beeb9dfec1a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.837881] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778028, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.847674] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778029, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.903028] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778030, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.104843] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.962s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1688.108964] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.975s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.109578] env[62619]: INFO nova.compute.claims [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1688.136293] env[62619]: INFO nova.scheduler.client.report [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Deleted allocations for instance 20d62152-3859-4023-a11d-b17c76e1090a [ 1688.156728] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1688.156728] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3e3d3a2b-ddb5-4f02-a497-3d1ee9160031 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.166364] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1688.166364] env[62619]: value = "task-1778031" [ 1688.166364] env[62619]: _type = "Task" [ 1688.166364] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.179409] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778031, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.339285] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778028, 'name': ReconfigVM_Task, 'duration_secs': 0.687596} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.339285] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 4983b333-debb-4a2b-b28d-b321f0d8d7d7/4983b333-debb-4a2b-b28d-b321f0d8d7d7.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1688.339285] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-586f0cc7-cd86-48aa-9bda-ae265c320fde {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.349712] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778029, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.881914} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.352054] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8c296f2c-3e47-4431-b0c0-f7f1706c4a12/8c296f2c-3e47-4431-b0c0-f7f1706c4a12.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1688.353022] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1688.353022] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1688.353022] env[62619]: value = "task-1778032" [ 1688.353022] env[62619]: _type = "Task" [ 1688.353022] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.353022] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f367f32-62be-4e21-b0d9-2e1744b62925 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.371404] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778032, 'name': Rename_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.372410] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1688.372410] env[62619]: value = "task-1778033" [ 1688.372410] env[62619]: _type = "Task" [ 1688.372410] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.385584] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778033, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.400976] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778030, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.646432] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d434ca1-1797-490a-98af-ebd03400d234 tempest-ServerDiagnosticsNegativeTest-1106494246 tempest-ServerDiagnosticsNegativeTest-1106494246-project-member] Lock "20d62152-3859-4023-a11d-b17c76e1090a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.827s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1688.680730] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778031, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.711619] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-23b74cce-f20e-49a7-a050-e63b4d4fadba tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1688.711943] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6891cc10-3612-4376-a139-ec8d924e2d8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.719227] env[62619]: DEBUG oslo_vmware.api [None req-23b74cce-f20e-49a7-a050-e63b4d4fadba tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1688.719227] env[62619]: value = "task-1778034" [ 1688.719227] env[62619]: _type = "Task" [ 1688.719227] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.731387] env[62619]: DEBUG oslo_vmware.api [None req-23b74cce-f20e-49a7-a050-e63b4d4fadba tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778034, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.869319] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778032, 'name': Rename_Task, 'duration_secs': 0.260256} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.870124] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1688.871387] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f47cbdf-40c0-4213-afc6-607e14bb0031 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.886239] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778033, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.188122} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.888757] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1688.889254] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1688.889254] env[62619]: value = "task-1778035" [ 1688.889254] env[62619]: _type = "Task" [ 1688.889254] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.890741] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76f5b00-257c-4220-bab7-4f57f8247415 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.931380] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 8c296f2c-3e47-4431-b0c0-f7f1706c4a12/8c296f2c-3e47-4431-b0c0-f7f1706c4a12.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1688.942171] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c1142a1-8e88-481d-8de9-35114d674174 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.964350] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778030, 'name': CloneVM_Task} progress is 95%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.964826] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778035, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.970467] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1688.970467] env[62619]: value = "task-1778036" [ 1688.970467] env[62619]: _type = "Task" [ 1688.970467] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.979987] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778036, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.179697] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778031, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.237038] env[62619]: DEBUG oslo_vmware.api [None req-23b74cce-f20e-49a7-a050-e63b4d4fadba tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778034, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.414187] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778035, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.418779] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778030, 'name': CloneVM_Task, 'duration_secs': 1.783099} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.421767] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Created linked-clone VM from snapshot [ 1689.424307] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-412abad2-aba3-40d8-855a-0592b8774c43 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.442377] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Uploading image 034267b5-5870-4201-8726-91111429c131 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1689.465951] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084299eb-d677-4997-911c-e112f17e354c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.478444] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a925b14a-5496-44de-93be-2aef5ba13b0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.488503] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778036, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.525291] env[62619]: DEBUG oslo_vmware.rw_handles [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1689.525291] env[62619]: value = "vm-369094" [ 1689.525291] env[62619]: _type = "VirtualMachine" [ 1689.525291] env[62619]: }. 
{{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1689.526262] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6330b77-e0f7-42ba-afac-ebf23e3419ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.529032] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-87e9ee0e-66d0-4455-908e-066e70cc2896 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.538662] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32062b56-287c-4087-9976-d84317d8ee96 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.544068] env[62619]: DEBUG oslo_vmware.rw_handles [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lease: (returnval){ [ 1689.544068] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52152427-0fa1-f594-1b17-ea3cc31ee8c5" [ 1689.544068] env[62619]: _type = "HttpNfcLease" [ 1689.544068] env[62619]: } obtained for exporting VM: (result){ [ 1689.544068] env[62619]: value = "vm-369094" [ 1689.544068] env[62619]: _type = "VirtualMachine" [ 1689.544068] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1689.544464] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the lease: (returnval){ [ 1689.544464] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52152427-0fa1-f594-1b17-ea3cc31ee8c5" [ 1689.544464] env[62619]: _type = "HttpNfcLease" [ 1689.544464] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1689.558266] env[62619]: DEBUG nova.compute.provider_tree [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1689.567347] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1689.567347] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52152427-0fa1-f594-1b17-ea3cc31ee8c5" [ 1689.567347] env[62619]: _type = "HttpNfcLease" [ 1689.567347] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1689.678564] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778031, 'name': CreateSnapshot_Task, 'duration_secs': 1.360782} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.678942] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1689.680128] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570d7161-081b-432d-bd1e-df1d809ff6c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.731768] env[62619]: DEBUG oslo_vmware.api [None req-23b74cce-f20e-49a7-a050-e63b4d4fadba tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778034, 'name': PowerOnVM_Task, 'duration_secs': 0.623746} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.732184] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-23b74cce-f20e-49a7-a050-e63b4d4fadba tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1689.732293] env[62619]: DEBUG nova.compute.manager [None req-23b74cce-f20e-49a7-a050-e63b4d4fadba tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1689.733386] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2bc9bb-a0db-4d7d-a464-539ac664883e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.905956] env[62619]: DEBUG oslo_vmware.api [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778035, 'name': PowerOnVM_Task, 'duration_secs': 0.994641} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.906268] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1689.906509] env[62619]: INFO nova.compute.manager [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Took 11.21 seconds to spawn the instance on the hypervisor. 
[ 1689.906700] env[62619]: DEBUG nova.compute.manager [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1689.907822] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535bc64b-0c47-4ff5-8c2b-1f798d639a54 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.982374] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778036, 'name': ReconfigVM_Task, 'duration_secs': 0.565543} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.982677] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 8c296f2c-3e47-4431-b0c0-f7f1706c4a12/8c296f2c-3e47-4431-b0c0-f7f1706c4a12.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1689.985950] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2a61f43-ae54-44cd-b970-f9d05aab79ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.993069] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1689.993069] env[62619]: value = "task-1778039" [ 1689.993069] env[62619]: _type = "Task" [ 1689.993069] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.999492] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778039, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.054393] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1690.054393] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52152427-0fa1-f594-1b17-ea3cc31ee8c5" [ 1690.054393] env[62619]: _type = "HttpNfcLease" [ 1690.054393] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1690.054979] env[62619]: DEBUG oslo_vmware.rw_handles [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1690.054979] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52152427-0fa1-f594-1b17-ea3cc31ee8c5" [ 1690.054979] env[62619]: _type = "HttpNfcLease" [ 1690.054979] env[62619]: }. 
{{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1690.056870] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b144dc-7084-4263-a2c1-a6b3b8b00182 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.067296] env[62619]: DEBUG nova.scheduler.client.report [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1690.071195] env[62619]: DEBUG oslo_vmware.rw_handles [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52950467-6d0c-4f6a-aa7f-b529e37f8d71/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1690.071551] env[62619]: DEBUG oslo_vmware.rw_handles [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52950467-6d0c-4f6a-aa7f-b529e37f8d71/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1690.200827] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1690.201176] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f3b61dde-d1b9-472c-8895-1dbd0c51b4af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.209896] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1690.209896] env[62619]: value = "task-1778040" [ 1690.209896] env[62619]: _type = "Task" [ 1690.209896] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.217741] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778040, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.258693] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ffc61842-17da-4565-8a0b-57599e7017cd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.431617] env[62619]: INFO nova.compute.manager [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Took 41.29 seconds to build instance. [ 1690.504208] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778039, 'name': Rename_Task, 'duration_secs': 0.373155} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.504800] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1690.506604] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-189e9143-c107-4985-8a0a-c8607603c1b9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.514039] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1690.514039] env[62619]: value = "task-1778041" [ 1690.514039] env[62619]: _type = "Task" [ 1690.514039] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.527901] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778041, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.575873] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.468s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.576428] env[62619]: DEBUG nova.compute.manager [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1690.579063] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.027s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.580462] env[62619]: INFO nova.compute.claims [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1690.728375] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778040, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.937157] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eaee2504-0abf-46e6-a31e-41406f79e55b tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.795s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.029879] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778041, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.085232] env[62619]: DEBUG nova.compute.utils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1691.089632] env[62619]: DEBUG nova.compute.manager [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1691.089800] env[62619]: DEBUG nova.network.neutron [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1691.224122] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778040, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.235130] env[62619]: DEBUG nova.policy [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e9094d6b3854c1184307d9bc35a966e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e11e1bca0c747fd8b4a0ca3e220ba4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1691.433110] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.437229] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.437229] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.437229] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.437229] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.438442] env[62619]: INFO nova.compute.manager [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Terminating instance [ 1691.526519] env[62619]: DEBUG oslo_vmware.api [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 
tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778041, 'name': PowerOnVM_Task, 'duration_secs': 0.730539} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.528338] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1691.528338] env[62619]: DEBUG nova.compute.manager [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1691.528338] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5bb88d-735c-43ec-b3f2-4dc9e2e5fbab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.590484] env[62619]: DEBUG nova.compute.manager [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1691.730248] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778040, 'name': CloneVM_Task, 'duration_secs': 1.48913} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.731506] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Created linked-clone VM from snapshot [ 1691.733700] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246d4398-408d-45ef-9303-009991427182 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.744357] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Uploading image 54ee24e3-804e-4db8-ad12-2312cca164f9 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1691.761326] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1691.761661] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ac29e360-f02b-4850-b655-fed012bf61c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.769027] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1691.769027] env[62619]: value = "task-1778042" [ 1691.769027] env[62619]: _type = "Task" [ 1691.769027] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.784269] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778042, 'name': Destroy_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.934242] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed9fe815-611a-4d11-bc42-c2c761371fc0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.946452] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b103085-0fb3-4eb3-8839-5b7054f13312 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.951445] env[62619]: DEBUG nova.network.neutron [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Successfully created port: 6ccfd58b-04e7-42b0-b5a3-e63d420ab341 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1691.954080] env[62619]: DEBUG nova.compute.manager [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1691.954576] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1691.955709] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d87452b-9aa6-49e1-9a9b-7d1a1549e4d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.964580] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1691.992309] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a05d6e5-3de4-4764-8e31-4e52b3942ed9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.995884] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d17072-882e-4916-9bc9-2cd5f38ead7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.010608] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a129d4-0dae-43db-ab7b-01298791aad3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.015716] env[62619]: DEBUG oslo_vmware.api [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1692.015716] env[62619]: value = "task-1778043" [ 1692.015716] env[62619]: _type = "Task" [ 1692.015716] env[62619]: } to 
complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.025818] env[62619]: DEBUG nova.compute.provider_tree [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1692.033709] env[62619]: DEBUG oslo_vmware.api [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778043, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.046687] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.086047] env[62619]: DEBUG nova.compute.manager [req-4c5cf349-8a47-4f4d-8f49-2c2faa045324 req-81bbc964-6867-4e1a-b7e0-0c585df171f4 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Received event network-changed-db7fd312-3521-4a87-9acc-4d86d518b63c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1692.086399] env[62619]: DEBUG nova.compute.manager [req-4c5cf349-8a47-4f4d-8f49-2c2faa045324 req-81bbc964-6867-4e1a-b7e0-0c585df171f4 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Refreshing instance network info cache due to event network-changed-db7fd312-3521-4a87-9acc-4d86d518b63c. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1692.086669] env[62619]: DEBUG oslo_concurrency.lockutils [req-4c5cf349-8a47-4f4d-8f49-2c2faa045324 req-81bbc964-6867-4e1a-b7e0-0c585df171f4 service nova] Acquiring lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1692.086897] env[62619]: DEBUG oslo_concurrency.lockutils [req-4c5cf349-8a47-4f4d-8f49-2c2faa045324 req-81bbc964-6867-4e1a-b7e0-0c585df171f4 service nova] Acquired lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1692.087639] env[62619]: DEBUG nova.network.neutron [req-4c5cf349-8a47-4f4d-8f49-2c2faa045324 req-81bbc964-6867-4e1a-b7e0-0c585df171f4 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Refreshing network info cache for port db7fd312-3521-4a87-9acc-4d86d518b63c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1692.283866] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778042, 'name': Destroy_Task} progress is 33%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.390436] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquiring lock "8c296f2c-3e47-4431-b0c0-f7f1706c4a12" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.390698] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Lock "8c296f2c-3e47-4431-b0c0-f7f1706c4a12" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1692.390932] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquiring lock "8c296f2c-3e47-4431-b0c0-f7f1706c4a12-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.391059] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Lock "8c296f2c-3e47-4431-b0c0-f7f1706c4a12-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1692.391059] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Lock "8c296f2c-3e47-4431-b0c0-f7f1706c4a12-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.393954] env[62619]: INFO nova.compute.manager [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Terminating instance [ 1692.527144] env[62619]: DEBUG oslo_vmware.api [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778043, 'name': PowerOffVM_Task, 'duration_secs': 0.337381} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.527449] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1692.527600] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1692.527864] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dbc45f0d-1604-41b9-a242-6cce9a29a054 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.530835] env[62619]: DEBUG nova.scheduler.client.report [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1692.607984] env[62619]: DEBUG nova.compute.manager [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1692.614552] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1692.614855] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1692.615127] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Deleting the datastore file [datastore1] da806d3f-79f0-4188-a2d8-0beeb9dfec1a {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1692.615449] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2c272c9-2ebc-4c79-9086-c353476376a0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.623373] env[62619]: DEBUG oslo_vmware.api [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1692.623373] env[62619]: value = "task-1778045" [ 1692.623373] env[62619]: _type = "Task" [ 1692.623373] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.635895] env[62619]: DEBUG oslo_vmware.api [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778045, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.648155] env[62619]: DEBUG nova.virt.hardware [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1692.648496] env[62619]: DEBUG nova.virt.hardware [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1692.648737] env[62619]: DEBUG nova.virt.hardware [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1692.649026] env[62619]: DEBUG nova.virt.hardware [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1692.649189] env[62619]: DEBUG nova.virt.hardware [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1692.649417] env[62619]: DEBUG nova.virt.hardware [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1692.649688] env[62619]: DEBUG nova.virt.hardware [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1692.649861] env[62619]: DEBUG nova.virt.hardware [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1692.650122] env[62619]: DEBUG nova.virt.hardware [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 
tempest-ServersTestJSON-68127786-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1692.650432] env[62619]: DEBUG nova.virt.hardware [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1692.650710] env[62619]: DEBUG nova.virt.hardware [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1692.651525] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db93918-75e9-499b-8ab4-5a475715c79f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.660201] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d488b9-0229-4a85-a271-7c426b127a1e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.781359] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778042, 'name': Destroy_Task, 'duration_secs': 0.620188} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.781650] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Destroyed the VM [ 1692.781899] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1692.782170] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0d832b45-ce43-4ea0-89b8-703cc8f1debf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.788563] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1692.788563] env[62619]: value = "task-1778046" [ 1692.788563] env[62619]: _type = "Task" [ 1692.788563] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.797257] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778046, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.898933] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquiring lock "refresh_cache-8c296f2c-3e47-4431-b0c0-f7f1706c4a12" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1692.899181] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquired lock "refresh_cache-8c296f2c-3e47-4431-b0c0-f7f1706c4a12" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1692.899373] env[62619]: DEBUG nova.network.neutron [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1693.042024] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.460s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.042024] env[62619]: DEBUG nova.compute.manager [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1693.045317] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.365s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.048028] env[62619]: DEBUG nova.objects.instance [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lazy-loading 'resources' on Instance uuid ac03bcf3-61df-4557-8018-0ad54ef30f17 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1693.136679] env[62619]: DEBUG oslo_vmware.api [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778045, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184192} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.139251] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1693.139251] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1693.139251] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1693.139251] env[62619]: INFO nova.compute.manager [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1693.139251] env[62619]: DEBUG oslo.service.loopingcall [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1693.139251] env[62619]: DEBUG nova.compute.manager [-] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1693.139251] env[62619]: DEBUG nova.network.neutron [-] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1693.205217] env[62619]: DEBUG nova.network.neutron [req-4c5cf349-8a47-4f4d-8f49-2c2faa045324 req-81bbc964-6867-4e1a-b7e0-0c585df171f4 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Updated VIF entry in instance network info cache for port db7fd312-3521-4a87-9acc-4d86d518b63c. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1693.205217] env[62619]: DEBUG nova.network.neutron [req-4c5cf349-8a47-4f4d-8f49-2c2faa045324 req-81bbc964-6867-4e1a-b7e0-0c585df171f4 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Updating instance_info_cache with network_info: [{"id": "db7fd312-3521-4a87-9acc-4d86d518b63c", "address": "fa:16:3e:14:06:4e", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb7fd312-35", "ovs_interfaceid": "db7fd312-3521-4a87-9acc-4d86d518b63c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.302145] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778046, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.425645] env[62619]: DEBUG nova.network.neutron [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1693.509575] env[62619]: DEBUG nova.network.neutron [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.549510] env[62619]: DEBUG nova.compute.utils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1693.550906] env[62619]: DEBUG nova.compute.manager [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1693.551093] env[62619]: DEBUG nova.network.neutron [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1693.602483] env[62619]: DEBUG nova.policy [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd106b23f779045f788b2536afd8c623d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2377a52a195d4f0b9181207ab5741734', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1693.707647] env[62619]: DEBUG oslo_concurrency.lockutils [req-4c5cf349-8a47-4f4d-8f49-2c2faa045324 req-81bbc964-6867-4e1a-b7e0-0c585df171f4 service nova] Releasing lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1693.811073] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778046, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.862102] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e830bd-f640-4311-b019-d1c2f53f090f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.871783] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f8a15e-3f08-4f71-b463-13d6c40bfbcc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.907459] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13173c88-141d-400e-afe0-ff06d536b324 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.916333] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7f7574-4d67-4c04-a7a6-a9ae5692e558 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.931228] env[62619]: DEBUG nova.compute.provider_tree [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1693.933593] env[62619]: DEBUG nova.network.neutron [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 
34180944-02f7-4115-8178-64f2f2591080] Successfully created port: ef12af5c-f7e0-44d8-9222-fa7401efd43d {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1694.012860] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Releasing lock "refresh_cache-8c296f2c-3e47-4431-b0c0-f7f1706c4a12" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1694.013803] env[62619]: DEBUG nova.compute.manager [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1694.013803] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1694.015256] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431ffbc0-2723-4f50-b829-2621a833ce00 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.024429] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1694.024665] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d92d2e4-5333-4cec-86b4-ad2a4ddbd517 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.031631] env[62619]: DEBUG oslo_vmware.api [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1694.031631] env[62619]: value = "task-1778048" [ 1694.031631] env[62619]: _type = "Task" [ 1694.031631] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.041112] env[62619]: DEBUG oslo_vmware.api [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778048, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.058368] env[62619]: DEBUG nova.compute.manager [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1694.216642] env[62619]: DEBUG nova.network.neutron [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Successfully updated port: 6ccfd58b-04e7-42b0-b5a3-e63d420ab341 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1694.232148] env[62619]: DEBUG nova.compute.manager [req-1da333b5-0394-46e3-9e8c-14b45cf399e3 req-9a56a864-8f5d-40a3-aefc-56f1b124f0bc service nova] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Received event network-vif-plugged-6ccfd58b-04e7-42b0-b5a3-e63d420ab341 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1694.232378] env[62619]: DEBUG oslo_concurrency.lockutils [req-1da333b5-0394-46e3-9e8c-14b45cf399e3 req-9a56a864-8f5d-40a3-aefc-56f1b124f0bc service nova] Acquiring lock "e34a8173-952b-4ddc-90cf-3681387733fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.232584] env[62619]: DEBUG oslo_concurrency.lockutils [req-1da333b5-0394-46e3-9e8c-14b45cf399e3 req-9a56a864-8f5d-40a3-aefc-56f1b124f0bc service nova] Lock "e34a8173-952b-4ddc-90cf-3681387733fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1694.232739] env[62619]: DEBUG oslo_concurrency.lockutils [req-1da333b5-0394-46e3-9e8c-14b45cf399e3 req-9a56a864-8f5d-40a3-aefc-56f1b124f0bc service nova] Lock "e34a8173-952b-4ddc-90cf-3681387733fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1694.232896] env[62619]: DEBUG nova.compute.manager [req-1da333b5-0394-46e3-9e8c-14b45cf399e3 req-9a56a864-8f5d-40a3-aefc-56f1b124f0bc service nova] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] No waiting events found dispatching network-vif-plugged-6ccfd58b-04e7-42b0-b5a3-e63d420ab341 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1694.234254] env[62619]: WARNING nova.compute.manager [req-1da333b5-0394-46e3-9e8c-14b45cf399e3 req-9a56a864-8f5d-40a3-aefc-56f1b124f0bc service nova] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Received unexpected event network-vif-plugged-6ccfd58b-04e7-42b0-b5a3-e63d420ab341 for instance with vm_state building and task_state spawning. 
[ 1694.234504] env[62619]: DEBUG nova.compute.manager [req-1da333b5-0394-46e3-9e8c-14b45cf399e3 req-9a56a864-8f5d-40a3-aefc-56f1b124f0bc service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Received event network-vif-deleted-7df13a55-7d53-41b9-9489-591516bda30c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1694.234675] env[62619]: INFO nova.compute.manager [req-1da333b5-0394-46e3-9e8c-14b45cf399e3 req-9a56a864-8f5d-40a3-aefc-56f1b124f0bc service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Neutron deleted interface 7df13a55-7d53-41b9-9489-591516bda30c; detaching it from the instance and deleting it from the info cache [ 1694.234834] env[62619]: DEBUG nova.network.neutron [req-1da333b5-0394-46e3-9e8c-14b45cf399e3 req-9a56a864-8f5d-40a3-aefc-56f1b124f0bc service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1694.307870] env[62619]: DEBUG oslo_vmware.api [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778046, 'name': RemoveSnapshot_Task, 'duration_secs': 1.094341} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.308092] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1694.438648] env[62619]: DEBUG nova.scheduler.client.report [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1694.548420] env[62619]: DEBUG oslo_vmware.api [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778048, 'name': PowerOffVM_Task, 'duration_secs': 0.179883} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.548718] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1694.549317] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1694.549317] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c47efeb6-cd22-4b9c-83b1-694712422a26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.575687] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1694.575924] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1694.576147] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Deleting the datastore file [datastore1] 8c296f2c-3e47-4431-b0c0-f7f1706c4a12 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1694.576367] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c2f94c2-77c7-48c5-bf5d-14b5a560f93d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.583425] env[62619]: DEBUG oslo_vmware.api [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for the task: (returnval){ [ 1694.583425] env[62619]: value = "task-1778050" [ 1694.583425] env[62619]: _type = "Task" [ 1694.583425] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.586861] env[62619]: DEBUG nova.network.neutron [-] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1694.592211] env[62619]: DEBUG oslo_vmware.api [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778050, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.727203] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "refresh_cache-e34a8173-952b-4ddc-90cf-3681387733fa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1694.727203] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "refresh_cache-e34a8173-952b-4ddc-90cf-3681387733fa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1694.727203] env[62619]: DEBUG nova.network.neutron [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1694.738974] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-12a125df-540f-4f16-974d-7ded27c694ca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.747355] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36589992-eab7-433d-8070-8e8c4b5964a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.785536] env[62619]: DEBUG nova.compute.manager [req-1da333b5-0394-46e3-9e8c-14b45cf399e3 req-9a56a864-8f5d-40a3-aefc-56f1b124f0bc service nova] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Detach interface failed, port_id=7df13a55-7d53-41b9-9489-591516bda30c, reason: Instance da806d3f-79f0-4188-a2d8-0beeb9dfec1a could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1694.813208] env[62619]: WARNING nova.compute.manager [None req-0f6f84b7-40c7-4a25-8ab6-ef6632032d2c tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Image not found during snapshot: nova.exception.ImageNotFound: Image 54ee24e3-804e-4db8-ad12-2312cca164f9 could not be found. 
[ 1694.949403] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.905s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1694.951831] env[62619]: DEBUG oslo_concurrency.lockutils [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.485s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1694.952455] env[62619]: DEBUG nova.objects.instance [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lazy-loading 'resources' on Instance uuid cb33580e-d70d-4557-98fe-e673d93f3307 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1694.975671] env[62619]: INFO nova.scheduler.client.report [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Deleted allocations for instance ac03bcf3-61df-4557-8018-0ad54ef30f17 [ 1695.072823] env[62619]: DEBUG nova.compute.manager [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1695.089068] env[62619]: INFO nova.compute.manager [-] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Took 1.95 seconds to deallocate network for instance. [ 1695.099859] env[62619]: DEBUG oslo_vmware.api [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Task: {'id': task-1778050, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209245} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.103410] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1695.103410] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1695.103410] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1695.103410] env[62619]: INFO nova.compute.manager [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1695.103410] env[62619]: DEBUG oslo.service.loopingcall [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1695.104700] env[62619]: DEBUG nova.compute.manager [-] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1695.105041] env[62619]: DEBUG nova.network.neutron [-] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1695.111033] env[62619]: DEBUG nova.virt.hardware [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1695.111033] env[62619]: DEBUG nova.virt.hardware [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 
tempest-DeleteServersTestJSON-378247299-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1695.111033] env[62619]: DEBUG nova.virt.hardware [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1695.111033] env[62619]: DEBUG nova.virt.hardware [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1695.111033] env[62619]: DEBUG nova.virt.hardware [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1695.111033] env[62619]: DEBUG nova.virt.hardware [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1695.111033] env[62619]: DEBUG nova.virt.hardware [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1695.111033] env[62619]: DEBUG nova.virt.hardware [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1695.112672] env[62619]: DEBUG nova.virt.hardware [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1695.112672] env[62619]: DEBUG nova.virt.hardware [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1695.112672] env[62619]: DEBUG nova.virt.hardware [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1695.116366] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e5b99b-8d6c-43f8-9169-718b66d86bf0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.122038] env[62619]: DEBUG nova.network.neutron [-] [instance: 
8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1695.129622] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6eb3be-ef5c-4ccd-a146-1880f4b66af2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.151595] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "b3d9c418-f521-4770-a381-5238be6cc33c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.151595] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "b3d9c418-f521-4770-a381-5238be6cc33c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.151595] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "b3d9c418-f521-4770-a381-5238be6cc33c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.151918] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "b3d9c418-f521-4770-a381-5238be6cc33c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.152330] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "b3d9c418-f521-4770-a381-5238be6cc33c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.156086] env[62619]: INFO nova.compute.manager [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Terminating instance [ 1695.260914] env[62619]: DEBUG nova.network.neutron [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1695.446999] env[62619]: DEBUG nova.network.neutron [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Updating instance_info_cache with network_info: [{"id": "6ccfd58b-04e7-42b0-b5a3-e63d420ab341", "address": "fa:16:3e:1c:e6:ce", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ccfd58b-04", "ovs_interfaceid": "6ccfd58b-04e7-42b0-b5a3-e63d420ab341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.487984] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc9f0fd0-b023-4a4e-a171-8b21f222858c tempest-ServersAdminTestJSON-1690188522 tempest-ServersAdminTestJSON-1690188522-project-member] Lock "ac03bcf3-61df-4557-8018-0ad54ef30f17" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.451s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.597275] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.625501] env[62619]: DEBUG nova.network.neutron [-] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.660190] env[62619]: DEBUG nova.compute.manager [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1695.660378] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1695.663943] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d65e48-5ec0-451d-a4a2-681429fc73ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.674481] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1695.674769] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce937695-bab8-41e4-9c20-6f92baf0caf8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.681610] env[62619]: DEBUG oslo_vmware.api [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1695.681610] env[62619]: value = "task-1778052" [ 1695.681610] env[62619]: _type = "Task" [ 1695.681610] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.694442] env[62619]: DEBUG oslo_vmware.api [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778052, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.767553] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "4858096a-9683-4a7c-bbeb-4e6b2f5401cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.767920] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "4858096a-9683-4a7c-bbeb-4e6b2f5401cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.770009] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da48194-72d2-48e3-8813-344b5ea531c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.781016] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf56bbc2-c28d-4e75-a2c9-fc7555452e67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.825929] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cff8e5-1124-46c5-8036-287554e77be9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.835623] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e2c0f2-c08d-402f-84b6-ec597b2781da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.850944] env[62619]: DEBUG nova.compute.provider_tree [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1695.950236] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "refresh_cache-e34a8173-952b-4ddc-90cf-3681387733fa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1695.950587] env[62619]: DEBUG nova.compute.manager [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Instance network_info: |[{"id": "6ccfd58b-04e7-42b0-b5a3-e63d420ab341", "address": "fa:16:3e:1c:e6:ce", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ccfd58b-04", "ovs_interfaceid": "6ccfd58b-04e7-42b0-b5a3-e63d420ab341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1695.951027] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:e6:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '950a2f67-7668-4376-9d48-b38dca033c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ccfd58b-04e7-42b0-b5a3-e63d420ab341', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1695.960466] env[62619]: DEBUG oslo.service.loopingcall [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1695.960758] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1695.961031] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8754e310-eff3-42fe-8719-07f99a48d10c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.983880] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1695.983880] env[62619]: value = "task-1778053" [ 1695.983880] env[62619]: _type = "Task" [ 1695.983880] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.990390] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778053, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.128903] env[62619]: INFO nova.compute.manager [-] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Took 1.02 seconds to deallocate network for instance. [ 1696.194054] env[62619]: DEBUG oslo_vmware.api [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778052, 'name': PowerOffVM_Task, 'duration_secs': 0.263473} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.194054] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1696.194054] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1696.194836] env[62619]: DEBUG nova.compute.manager [req-ce30a821-dd9e-44b6-903d-1dda49974303 req-859cf23e-edbf-4ee6-a962-69159e19899d service nova] [instance: 34180944-02f7-4115-8178-64f2f2591080] Received event network-vif-plugged-ef12af5c-f7e0-44d8-9222-fa7401efd43d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1696.195228] env[62619]: DEBUG oslo_concurrency.lockutils [req-ce30a821-dd9e-44b6-903d-1dda49974303 req-859cf23e-edbf-4ee6-a962-69159e19899d service nova] Acquiring lock "34180944-02f7-4115-8178-64f2f2591080-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.195534] env[62619]: DEBUG oslo_concurrency.lockutils [req-ce30a821-dd9e-44b6-903d-1dda49974303 req-859cf23e-edbf-4ee6-a962-69159e19899d service nova] Lock "34180944-02f7-4115-8178-64f2f2591080-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.196081] env[62619]: DEBUG oslo_concurrency.lockutils [req-ce30a821-dd9e-44b6-903d-1dda49974303 req-859cf23e-edbf-4ee6-a962-69159e19899d service nova] Lock "34180944-02f7-4115-8178-64f2f2591080-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.196309] env[62619]: DEBUG nova.compute.manager [req-ce30a821-dd9e-44b6-903d-1dda49974303 req-859cf23e-edbf-4ee6-a962-69159e19899d service nova] [instance: 34180944-02f7-4115-8178-64f2f2591080] No waiting events found dispatching network-vif-plugged-ef12af5c-f7e0-44d8-9222-fa7401efd43d {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1696.196529] env[62619]: WARNING nova.compute.manager [req-ce30a821-dd9e-44b6-903d-1dda49974303 req-859cf23e-edbf-4ee6-a962-69159e19899d service nova] [instance: 34180944-02f7-4115-8178-64f2f2591080] Received unexpected event network-vif-plugged-ef12af5c-f7e0-44d8-9222-fa7401efd43d for instance with vm_state building and task_state spawning. 
[ 1696.196884] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7386958-cc2f-449d-b299-ea6a550e0b1f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.271948] env[62619]: DEBUG nova.compute.manager [req-9a72f654-8dca-47b2-bac5-e9ee8873a8b4 req-3e58031c-fa0c-4d21-ba3d-0170a7991af7 service nova] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Received event network-changed-6ccfd58b-04e7-42b0-b5a3-e63d420ab341 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1696.272174] env[62619]: DEBUG nova.compute.manager [req-9a72f654-8dca-47b2-bac5-e9ee8873a8b4 req-3e58031c-fa0c-4d21-ba3d-0170a7991af7 service nova] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Refreshing instance network info cache due to event network-changed-6ccfd58b-04e7-42b0-b5a3-e63d420ab341. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1696.272388] env[62619]: DEBUG oslo_concurrency.lockutils [req-9a72f654-8dca-47b2-bac5-e9ee8873a8b4 req-3e58031c-fa0c-4d21-ba3d-0170a7991af7 service nova] Acquiring lock "refresh_cache-e34a8173-952b-4ddc-90cf-3681387733fa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1696.272522] env[62619]: DEBUG oslo_concurrency.lockutils [req-9a72f654-8dca-47b2-bac5-e9ee8873a8b4 req-3e58031c-fa0c-4d21-ba3d-0170a7991af7 service nova] Acquired lock "refresh_cache-e34a8173-952b-4ddc-90cf-3681387733fa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1696.272702] env[62619]: DEBUG nova.network.neutron [req-9a72f654-8dca-47b2-bac5-e9ee8873a8b4 req-3e58031c-fa0c-4d21-ba3d-0170a7991af7 service nova] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Refreshing network info cache for port 6ccfd58b-04e7-42b0-b5a3-e63d420ab341 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1696.275280] env[62619]: DEBUG nova.compute.manager [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1696.307916] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1696.307916] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1696.308054] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleting the datastore file [datastore1] b3d9c418-f521-4770-a381-5238be6cc33c {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1696.308299] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e206741-5473-4ba6-a374-822310536aeb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.319116] env[62619]: DEBUG oslo_vmware.api [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for the task: (returnval){ [ 1696.319116] env[62619]: value = "task-1778055" [ 1696.319116] env[62619]: _type = "Task" [ 1696.319116] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.338751] env[62619]: DEBUG oslo_vmware.api [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778055, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.354991] env[62619]: DEBUG nova.scheduler.client.report [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1696.493996] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778053, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.503023] env[62619]: DEBUG nova.network.neutron [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Successfully updated port: ef12af5c-f7e0-44d8-9222-fa7401efd43d {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1696.639910] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.797608] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.837221] env[62619]: DEBUG oslo_vmware.api [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Task: {'id': task-1778055, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148673} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.837611] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1696.837907] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1696.839216] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1696.839377] env[62619]: INFO nova.compute.manager [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1696.839741] env[62619]: DEBUG oslo.service.loopingcall [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1696.840840] env[62619]: DEBUG nova.compute.manager [-] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1696.841974] env[62619]: DEBUG nova.network.neutron [-] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1696.865640] env[62619]: DEBUG oslo_concurrency.lockutils [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.913s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.870425] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 9.216s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.893567] env[62619]: INFO nova.scheduler.client.report [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Deleted allocations for instance cb33580e-d70d-4557-98fe-e673d93f3307 [ 1696.994966] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778053, 'name': CreateVM_Task, 'duration_secs': 0.617781} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.002706] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1697.003192] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1697.003439] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1697.003687] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1697.004338] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-587c0164-989e-4ab6-917e-31383a583b23 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.013351] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "refresh_cache-34180944-02f7-4115-8178-64f2f2591080" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1697.013351] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "refresh_cache-34180944-02f7-4115-8178-64f2f2591080" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1697.013520] env[62619]: DEBUG nova.network.neutron [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1697.019445] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1697.019445] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52671716-08c6-e1dc-43fa-e49bd0b1164e" [ 1697.019445] env[62619]: _type = "Task" [ 1697.019445] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.027986] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52671716-08c6-e1dc-43fa-e49bd0b1164e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.159083] env[62619]: DEBUG nova.network.neutron [req-9a72f654-8dca-47b2-bac5-e9ee8873a8b4 req-3e58031c-fa0c-4d21-ba3d-0170a7991af7 service nova] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Updated VIF entry in instance network info cache for port 6ccfd58b-04e7-42b0-b5a3-e63d420ab341. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1697.159864] env[62619]: DEBUG nova.network.neutron [req-9a72f654-8dca-47b2-bac5-e9ee8873a8b4 req-3e58031c-fa0c-4d21-ba3d-0170a7991af7 service nova] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Updating instance_info_cache with network_info: [{"id": "6ccfd58b-04e7-42b0-b5a3-e63d420ab341", "address": "fa:16:3e:1c:e6:ce", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ccfd58b-04", "ovs_interfaceid": "6ccfd58b-04e7-42b0-b5a3-e63d420ab341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1697.410370] env[62619]: DEBUG oslo_concurrency.lockutils [None req-88a790da-2076-4fec-bc92-8f9b4767aed2 tempest-ImagesOneServerNegativeTestJSON-913982911 tempest-ImagesOneServerNegativeTestJSON-913982911-project-member] Lock "cb33580e-d70d-4557-98fe-e673d93f3307" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.390s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.531162] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52671716-08c6-e1dc-43fa-e49bd0b1164e, 'name': SearchDatastore_Task, 'duration_secs': 0.009402} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.531497] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1697.531733] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1697.531969] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1697.532133] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1697.532685] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1697.532685] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2ce2a33-250e-4e94-942c-52c5ee024a27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.547446] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1697.547882] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1697.551499] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b23f8824-37ae-4798-bfac-73fafc4551b0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.560987] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1697.560987] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52887abc-21d3-9177-ed22-9a8fccd3dece" [ 1697.560987] env[62619]: _type = "Task" [ 1697.560987] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.561871] env[62619]: DEBUG nova.network.neutron [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1697.577385] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52887abc-21d3-9177-ed22-9a8fccd3dece, 'name': SearchDatastore_Task, 'duration_secs': 0.014884} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.578303] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e25c75bb-e369-4c7b-9745-d649fc67ceb5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.583458] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1697.583458] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521d4dee-9aa1-ffec-8319-e1fd0cfb438d" [ 1697.583458] env[62619]: _type = "Task" [ 1697.583458] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.599253] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521d4dee-9aa1-ffec-8319-e1fd0cfb438d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.655452] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67d155c-3f1d-45b7-a702-11e6c89b044f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.663171] env[62619]: DEBUG oslo_concurrency.lockutils [req-9a72f654-8dca-47b2-bac5-e9ee8873a8b4 req-3e58031c-fa0c-4d21-ba3d-0170a7991af7 service nova] Releasing lock "refresh_cache-e34a8173-952b-4ddc-90cf-3681387733fa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1697.664539] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8169030-cbff-4cbc-95be-0ba4ac5ac3d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.700373] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f52190-655d-47f7-a90c-c83be65a652d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.709213] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a15980-a5f2-49a7-8715-69a3d6449986 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.731276] env[62619]: DEBUG nova.compute.provider_tree [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1697.784042] env[62619]: DEBUG nova.network.neutron [-] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1697.852312] env[62619]: DEBUG nova.network.neutron [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Updating instance_info_cache with network_info: [{"id": "ef12af5c-f7e0-44d8-9222-fa7401efd43d", "address": "fa:16:3e:67:86:b1", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef12af5c-f7", "ovs_interfaceid": "ef12af5c-f7e0-44d8-9222-fa7401efd43d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1698.095250] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521d4dee-9aa1-ffec-8319-e1fd0cfb438d, 'name': SearchDatastore_Task, 'duration_secs': 0.013979} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.095250] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1698.095250] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e34a8173-952b-4ddc-90cf-3681387733fa/e34a8173-952b-4ddc-90cf-3681387733fa.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1698.095460] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27e372cf-8cbe-4457-9e5e-36704099843f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.102296] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1698.102296] env[62619]: value = "task-1778057" [ 1698.102296] env[62619]: _type = "Task" [ 1698.102296] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.112424] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778057, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.226873] env[62619]: DEBUG nova.compute.manager [req-cca15741-8226-4eb1-85b7-1aab953af7e2 req-2ee9cec9-6afa-48c1-87b7-dc5d29090035 service nova] [instance: 34180944-02f7-4115-8178-64f2f2591080] Received event network-changed-ef12af5c-f7e0-44d8-9222-fa7401efd43d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1698.227166] env[62619]: DEBUG nova.compute.manager [req-cca15741-8226-4eb1-85b7-1aab953af7e2 req-2ee9cec9-6afa-48c1-87b7-dc5d29090035 service nova] [instance: 34180944-02f7-4115-8178-64f2f2591080] Refreshing instance network info cache due to event network-changed-ef12af5c-f7e0-44d8-9222-fa7401efd43d. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1698.227421] env[62619]: DEBUG oslo_concurrency.lockutils [req-cca15741-8226-4eb1-85b7-1aab953af7e2 req-2ee9cec9-6afa-48c1-87b7-dc5d29090035 service nova] Acquiring lock "refresh_cache-34180944-02f7-4115-8178-64f2f2591080" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1698.233176] env[62619]: DEBUG nova.scheduler.client.report [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1698.286335] env[62619]: INFO nova.compute.manager [-] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Took 1.45 seconds to deallocate network for instance. [ 1698.305388] env[62619]: DEBUG nova.compute.manager [req-b691d128-f70e-4ba6-ad71-027165ecba3b req-92b1c887-989c-45c6-b974-404e62e8fc9c service nova] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Received event network-vif-deleted-24c2e90b-5ba7-4ae7-92a8-0666ce5bd797 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1698.356319] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "refresh_cache-34180944-02f7-4115-8178-64f2f2591080" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1698.356840] env[62619]: DEBUG nova.compute.manager [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Instance network_info: |[{"id": "ef12af5c-f7e0-44d8-9222-fa7401efd43d", "address": "fa:16:3e:67:86:b1", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef12af5c-f7", "ovs_interfaceid": "ef12af5c-f7e0-44d8-9222-fa7401efd43d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1698.357094] env[62619]: DEBUG oslo_concurrency.lockutils [req-cca15741-8226-4eb1-85b7-1aab953af7e2 req-2ee9cec9-6afa-48c1-87b7-dc5d29090035 service nova] Acquired lock "refresh_cache-34180944-02f7-4115-8178-64f2f2591080" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1698.357291] env[62619]: DEBUG nova.network.neutron [req-cca15741-8226-4eb1-85b7-1aab953af7e2 req-2ee9cec9-6afa-48c1-87b7-dc5d29090035 service nova] [instance: 34180944-02f7-4115-8178-64f2f2591080] Refreshing network info cache for port ef12af5c-f7e0-44d8-9222-fa7401efd43d {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1698.358636] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:86:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef12af5c-f7e0-44d8-9222-fa7401efd43d', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1698.370263] env[62619]: DEBUG oslo.service.loopingcall [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1698.371138] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34180944-02f7-4115-8178-64f2f2591080] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1698.371385] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8154cf27-9574-453d-b542-a0e4f21c17eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.394195] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1698.394195] env[62619]: value = "task-1778058" [ 1698.394195] env[62619]: _type = "Task" [ 1698.394195] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.404076] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778058, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.613566] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778057, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49961} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.613938] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e34a8173-952b-4ddc-90cf-3681387733fa/e34a8173-952b-4ddc-90cf-3681387733fa.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1698.614621] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1698.614900] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e83cae65-dc5d-49c9-a2a5-e0f7480f820f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.621305] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1698.621305] env[62619]: value = "task-1778059" [ 1698.621305] env[62619]: _type = "Task" [ 1698.621305] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.630356] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778059, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.792954] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.906926] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778058, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.132386] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778059, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066712} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.132668] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1699.133522] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18289873-9349-4c48-80c7-5128b73b6f07 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.172200] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] e34a8173-952b-4ddc-90cf-3681387733fa/e34a8173-952b-4ddc-90cf-3681387733fa.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1699.172602] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae96b127-1185-4d7c-b264-ec433baefc15 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.200611] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1699.200611] env[62619]: value = "task-1778060" [ 1699.200611] env[62619]: _type = "Task" [ 1699.200611] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.210965] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778060, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.251083] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.381s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.254421] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.208s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.254612] env[62619]: DEBUG nova.objects.instance [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1699.410207] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778058, 'name': CreateVM_Task, 'duration_secs': 0.609415} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.410386] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34180944-02f7-4115-8178-64f2f2591080] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1699.411348] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1699.414019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1699.414019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1699.414019] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5187d4da-f498-45d4-bed2-27f3f7361e80 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.418149] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the 
task: (returnval){ [ 1699.418149] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ffe041-2e39-2a7b-b457-2d09376e74b8" [ 1699.418149] env[62619]: _type = "Task" [ 1699.418149] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.429828] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ffe041-2e39-2a7b-b457-2d09376e74b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.518802] env[62619]: DEBUG oslo_vmware.rw_handles [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52950467-6d0c-4f6a-aa7f-b529e37f8d71/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1699.519978] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17e0f9b-354f-40d3-980d-e8ceea1d94d7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.523552] env[62619]: DEBUG nova.network.neutron [req-cca15741-8226-4eb1-85b7-1aab953af7e2 req-2ee9cec9-6afa-48c1-87b7-dc5d29090035 service nova] [instance: 34180944-02f7-4115-8178-64f2f2591080] Updated VIF entry in instance network info cache for port ef12af5c-f7e0-44d8-9222-fa7401efd43d. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1699.524146] env[62619]: DEBUG nova.network.neutron [req-cca15741-8226-4eb1-85b7-1aab953af7e2 req-2ee9cec9-6afa-48c1-87b7-dc5d29090035 service nova] [instance: 34180944-02f7-4115-8178-64f2f2591080] Updating instance_info_cache with network_info: [{"id": "ef12af5c-f7e0-44d8-9222-fa7401efd43d", "address": "fa:16:3e:67:86:b1", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef12af5c-f7", "ovs_interfaceid": "ef12af5c-f7e0-44d8-9222-fa7401efd43d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.533822] env[62619]: DEBUG oslo_vmware.rw_handles [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 
tempest-ServersNegativeTestJSON-1288164526-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52950467-6d0c-4f6a-aa7f-b529e37f8d71/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1699.534906] env[62619]: ERROR oslo_vmware.rw_handles [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52950467-6d0c-4f6a-aa7f-b529e37f8d71/disk-0.vmdk due to incomplete transfer. [ 1699.534906] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ada1608e-0537-4ce6-8ddc-36bca76e2ba1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.551057] env[62619]: DEBUG oslo_vmware.rw_handles [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52950467-6d0c-4f6a-aa7f-b529e37f8d71/disk-0.vmdk. {{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1699.551300] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Uploaded image 034267b5-5870-4201-8726-91111429c131 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1699.553796] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1699.554084] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e840369a-3f54-41f4-81a0-c2bbe230c47e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.561681] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1699.561681] env[62619]: value = "task-1778061" [ 1699.561681] env[62619]: _type = "Task" [ 1699.561681] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.570914] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778061, 'name': Destroy_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.711163] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778060, 'name': ReconfigVM_Task, 'duration_secs': 0.375256} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.711473] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Reconfigured VM instance instance-00000050 to attach disk [datastore1] e34a8173-952b-4ddc-90cf-3681387733fa/e34a8173-952b-4ddc-90cf-3681387733fa.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1699.712786] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84429617-cc44-42d5-b8d2-ee192d363aa9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.719126] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1699.719126] env[62619]: value = "task-1778062" [ 1699.719126] env[62619]: _type = "Task" [ 1699.719126] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.728921] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778062, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.820859] env[62619]: INFO nova.scheduler.client.report [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleted allocation for migration 0025d712-72a0-433a-9c2a-ce2a5e846a5e [ 1699.931626] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ffe041-2e39-2a7b-b457-2d09376e74b8, 'name': SearchDatastore_Task, 'duration_secs': 0.010612} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.931944] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1699.932213] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1699.932504] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1699.932662] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1699.932868] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1699.933350] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7ad4873-db41-4c6b-825b-1a95209d511f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.943113] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1699.943589] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1699.944211] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f56833a0-d1f9-4c65-9bd9-a865c91fa797 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.952768] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1699.952768] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5221b7c8-a6da-ee6e-e10c-73b8269f9190" [ 1699.952768] env[62619]: _type = "Task" [ 1699.952768] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.961772] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5221b7c8-a6da-ee6e-e10c-73b8269f9190, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.030268] env[62619]: DEBUG oslo_concurrency.lockutils [req-cca15741-8226-4eb1-85b7-1aab953af7e2 req-2ee9cec9-6afa-48c1-87b7-dc5d29090035 service nova] Releasing lock "refresh_cache-34180944-02f7-4115-8178-64f2f2591080" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1700.073852] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778061, 'name': Destroy_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.229470] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778062, 'name': Rename_Task, 'duration_secs': 0.137803} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.229745] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1700.229995] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3b6b5cc-536f-4982-9c7f-21fc3dda4914 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.237171] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1700.237171] env[62619]: value = "task-1778063" [ 1700.237171] env[62619]: _type = "Task" [ 1700.237171] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.246456] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778063, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.272138] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b1e763-ade9-42aa-a38f-3ee7dfdcc7e8 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.274311] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.677s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.274860] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.277743] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.638s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.277979] env[62619]: DEBUG nova.objects.instance [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Lazy-loading 'resources' on Instance uuid 8c296f2c-3e47-4431-b0c0-f7f1706c4a12 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1700.306592] env[62619]: INFO nova.scheduler.client.report [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Deleted allocations for instance da806d3f-79f0-4188-a2d8-0beeb9dfec1a [ 1700.328867] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fa9b4dfb-0d05-438d-9536-dfb06d60d5bb tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "cbff225f-2d11-4a43-a320-95dd3afb8e48" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 15.795s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.464163] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5221b7c8-a6da-ee6e-e10c-73b8269f9190, 'name': SearchDatastore_Task, 
'duration_secs': 0.010493} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.467021] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-157a621e-a2ba-4b04-968b-be09a65a62d8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.473190] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1700.473190] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e48081-4e44-46a6-b379-d71c2d26c85c" [ 1700.473190] env[62619]: _type = "Task" [ 1700.473190] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.484288] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e48081-4e44-46a6-b379-d71c2d26c85c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.576836] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778061, 'name': Destroy_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.716213] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Acquiring lock "f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.717110] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Lock "f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.748245] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778063, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.821159] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2f37276-871c-472e-b72b-d91da6e403c8 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "da806d3f-79f0-4188-a2d8-0beeb9dfec1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.386s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.986324] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e48081-4e44-46a6-b379-d71c2d26c85c, 'name': SearchDatastore_Task, 'duration_secs': 0.015572} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.986646] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1700.986871] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 34180944-02f7-4115-8178-64f2f2591080/34180944-02f7-4115-8178-64f2f2591080.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1700.987202] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-abb65df5-9e12-475d-85cc-edd32236f991 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.999199] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1700.999199] env[62619]: value = "task-1778064" [ 1700.999199] env[62619]: _type = "Task" [ 1700.999199] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.011680] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778064, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.042999] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48da7e88-8701-42df-9ab6-ad44ba5e5654 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.052441] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88d0c73-95db-4e71-a3d9-6739d1580241 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.094900] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e995c7-f842-429a-a157-8d28066d8e0e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.104681] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778061, 'name': Destroy_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.108201] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f133e149-3f74-46ed-97c9-7ba1143db5e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.124453] env[62619]: DEBUG nova.compute.provider_tree [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1701.219744] env[62619]: DEBUG nova.compute.manager [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1701.250020] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778063, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.511417] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778064, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.430079} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.511703] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 34180944-02f7-4115-8178-64f2f2591080/34180944-02f7-4115-8178-64f2f2591080.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1701.512114] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1701.512190] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1998e42b-effd-4635-b478-f746382c2ac6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.520829] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1701.520829] env[62619]: value = "task-1778065" [ 1701.520829] env[62619]: _type = "Task" [ 1701.520829] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.530582] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778065, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.600587] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778061, 'name': Destroy_Task, 'duration_secs': 1.547179} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.600904] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Destroyed the VM [ 1701.601177] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1701.601435] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-931a7e1c-5882-4a20-8254-4be10611a6e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.610712] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1701.610712] env[62619]: value = "task-1778066" [ 1701.610712] env[62619]: _type = "Task" [ 1701.610712] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.619083] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778066, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.631218] env[62619]: DEBUG nova.scheduler.client.report [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1701.748946] env[62619]: DEBUG oslo_vmware.api [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778063, 'name': PowerOnVM_Task, 'duration_secs': 1.358402} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.749960] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1701.750198] env[62619]: INFO nova.compute.manager [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Took 9.14 seconds to spawn the instance on the hypervisor. [ 1701.750378] env[62619]: DEBUG nova.compute.manager [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1701.751420] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46205df-8618-44ef-8e94-0290cd4d53ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.755605] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.036263] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778065, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072201} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.036399] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1702.037418] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23d0785-8fb5-4f41-a1ce-df1e0b61cccf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.064091] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 34180944-02f7-4115-8178-64f2f2591080/34180944-02f7-4115-8178-64f2f2591080.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1702.064934] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6181719-573e-47ed-b1c8-892f15e5d419 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.089741] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1702.089741] env[62619]: value = "task-1778067" [ 1702.089741] env[62619]: _type = "Task" [ 1702.089741] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.100841] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778067, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.124563] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778066, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.136153] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.858s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.138565] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.341s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.140270] env[62619]: INFO nova.compute.claims [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1702.162785] env[62619]: INFO nova.scheduler.client.report [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Deleted allocations for instance 8c296f2c-3e47-4431-b0c0-f7f1706c4a12 [ 1702.278708] env[62619]: INFO nova.compute.manager [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Took 21.17 seconds to build instance. 
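The records around this point all follow the same oslo.vmware task pattern: a long-running vCenter call such as ReconfigVM_Task, Rename_Task or PowerOnVM_Task is submitted over the SOAP service, wait_for_task (api.py:397) registers the returned task object, and _poll_task (api.py:434) re-reads its progress until the final "completed successfully" record (api.py:444). The sketch below is a minimal illustration of that poll loop only; read_task_info is a hypothetical stand-in for the property query the real driver performs, not part of oslo.vmware.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; the real driver's interval is configurable


    class TaskFailed(Exception):
        """Raised when the hypervisor reports the task as failed."""


    def wait_for_task(task_id, read_task_info, timeout=300):
        """Poll a long-running hypervisor task until it finishes.

        read_task_info(task_id) is a hypothetical callable that must return a
        dict with 'state' in {'queued', 'running', 'success', 'error'} and an
        integer 'progress' (0-100), mirroring what the driver reads back from
        the task object.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = read_task_info(task_id)
            if info['state'] == 'success':
                return info  # carries e.g. 'duration_secs' as seen in the log
            if info['state'] == 'error':
                raise TaskFailed(f"{task_id}: {info.get('error', 'unknown error')}")
            # Corresponds to the "progress is N%" debug lines emitted while waiting.
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(POLL_INTERVAL)
        raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")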
[ 1702.497601] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "cbff225f-2d11-4a43-a320-95dd3afb8e48" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.497905] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "cbff225f-2d11-4a43-a320-95dd3afb8e48" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.498192] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "cbff225f-2d11-4a43-a320-95dd3afb8e48-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.498414] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "cbff225f-2d11-4a43-a320-95dd3afb8e48-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.499570] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "cbff225f-2d11-4a43-a320-95dd3afb8e48-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.505025] env[62619]: INFO nova.compute.manager [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Terminating instance [ 1702.602141] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778067, 'name': ReconfigVM_Task, 'duration_secs': 0.422082} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.602485] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 34180944-02f7-4115-8178-64f2f2591080/34180944-02f7-4115-8178-64f2f2591080.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1702.603181] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82576edf-9d54-4136-9b80-7fd4f40a5bd0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.613975] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1702.613975] env[62619]: value = "task-1778068" [ 1702.613975] env[62619]: _type = "Task" [ 1702.613975] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.629568] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778066, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.633523] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778068, 'name': Rename_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.674412] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cd1ab36-c5d9-42f0-b13f-004f0a683e00 tempest-ServerShowV254Test-912814971 tempest-ServerShowV254Test-912814971-project-member] Lock "8c296f2c-3e47-4431-b0c0-f7f1706c4a12" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.284s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.780734] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ca7f09c-0f8b-4868-93a9-7ce4a1257d52 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "e34a8173-952b-4ddc-90cf-3681387733fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.682s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.910857] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.910857] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.012504] env[62619]: DEBUG nova.compute.manager [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1703.012728] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1703.016705] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b14ea31-9117-40a0-a470-e63f30e3d148 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.026762] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1703.026762] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dfa8b371-f85d-4d12-93cb-ef75dd3486f7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.037880] env[62619]: DEBUG oslo_vmware.api [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1703.037880] env[62619]: value = "task-1778069" [ 1703.037880] env[62619]: _type = "Task" [ 1703.037880] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.050988] env[62619]: DEBUG oslo_vmware.api [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778069, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.056468] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "7cb51b51-514d-4223-a82a-5cdbdab9482a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.056468] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "7cb51b51-514d-4223-a82a-5cdbdab9482a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.133111] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778066, 'name': RemoveSnapshot_Task, 'duration_secs': 1.312147} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.138440] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1703.138865] env[62619]: DEBUG nova.compute.manager [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1703.139589] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778068, 'name': Rename_Task, 'duration_secs': 0.156533} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.140342] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63720b90-7741-4a87-a4c8-4a6d9fbb13fc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.143031] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1703.144103] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2f379d7-4c48-4cb8-90ed-dab31978d0ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.159469] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1703.159469] env[62619]: value = "task-1778070" [ 1703.159469] env[62619]: _type = "Task" [ 1703.159469] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.169197] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778070, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.414328] env[62619]: DEBUG nova.compute.manager [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1703.551139] env[62619]: DEBUG oslo_vmware.api [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778069, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.553958] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f38374-de56-4804-bf75-22f067f0f03e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.564472] env[62619]: DEBUG nova.compute.manager [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1703.569030] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f366645-5245-4982-bb31-a97b9e205844 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.612072] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2691fe06-50ec-432c-9dee-4cd770f5b100 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.620927] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99349873-2279-4982-a5bd-e256484af0bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.637459] env[62619]: DEBUG nova.compute.provider_tree [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1703.661057] env[62619]: INFO nova.compute.manager [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Shelve offloading [ 1703.677982] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778070, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.950095] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.050897] env[62619]: DEBUG oslo_vmware.api [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778069, 'name': PowerOffVM_Task, 'duration_secs': 0.908315} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.051106] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1704.051299] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1704.051569] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4110f88-334f-4a28-8a2b-bb54e2431355 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.092632] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.144026] env[62619]: DEBUG nova.scheduler.client.report [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1704.170228] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1704.178704] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6986661-c263-46ca-a6f0-958bab7c87d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.181660] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778070, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.189942] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1704.189942] env[62619]: value = "task-1778072" [ 1704.189942] env[62619]: _type = "Task" [ 1704.189942] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.202563] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1704.202816] env[62619]: DEBUG nova.compute.manager [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1704.203748] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1950f405-8930-47e3-92c7-facb0fbbf24c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.216753] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "a3101076-36d6-409a-8072-638107e63073" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.217131] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "a3101076-36d6-409a-8072-638107e63073" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.221591] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.221591] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquired lock 
"refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.221591] env[62619]: DEBUG nova.network.neutron [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1704.258795] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1704.259166] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1704.259210] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleting the datastore file [datastore1] cbff225f-2d11-4a43-a320-95dd3afb8e48 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1704.259496] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aebdb4b5-8395-41f7-b37a-9dea3753aab8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.269228] env[62619]: DEBUG oslo_vmware.api [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1704.269228] env[62619]: value = "task-1778073" [ 1704.269228] env[62619]: _type = "Task" [ 1704.269228] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.281155] env[62619]: DEBUG oslo_vmware.api [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778073, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.650675] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.512s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.654222] env[62619]: DEBUG nova.compute.manager [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1704.655174] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.862s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.655548] env[62619]: DEBUG nova.objects.instance [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lazy-loading 'resources' on Instance uuid b3d9c418-f521-4770-a381-5238be6cc33c {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1704.674736] env[62619]: DEBUG oslo_vmware.api [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778070, 'name': PowerOnVM_Task, 'duration_secs': 1.043447} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.675207] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1704.675632] env[62619]: INFO nova.compute.manager [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Took 9.60 seconds to spawn the instance on the hypervisor. [ 1704.676323] env[62619]: DEBUG nova.compute.manager [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1704.677406] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fc1f39-c45f-441f-9b03-631e8fc49a70 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.723407] env[62619]: DEBUG nova.compute.manager [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1704.790025] env[62619]: DEBUG oslo_vmware.api [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778073, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.263397} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.790345] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1704.790527] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1704.790712] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1704.790877] env[62619]: INFO nova.compute.manager [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Took 1.78 seconds to destroy the instance on the hypervisor. [ 1704.791133] env[62619]: DEBUG oslo.service.loopingcall [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1704.791316] env[62619]: DEBUG nova.compute.manager [-] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1704.791404] env[62619]: DEBUG nova.network.neutron [-] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1705.075504] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "8745aa7f-9848-4320-94b5-08b7e3bccf80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.075504] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "8745aa7f-9848-4320-94b5-08b7e3bccf80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.116640] env[62619]: DEBUG nova.network.neutron [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Updating instance_info_cache with network_info: [{"id": "a10c5399-b021-4ea7-8a41-4d58136aff12", "address": "fa:16:3e:14:4c:3d", "network": {"id": "7e652693-afc0-4c29-9a04-cad63acf109b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-107922130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf557954d79d4bb1939f6e65d4ed00b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa10c5399-b0", "ovs_interfaceid": "a10c5399-b021-4ea7-8a41-4d58136aff12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.157704] env[62619]: DEBUG nova.compute.utils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1705.162666] env[62619]: DEBUG nova.compute.manager [None req-01816882-91aa-465d-9c0f-6f061353f109 
tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1705.162836] env[62619]: DEBUG nova.network.neutron [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1705.210322] env[62619]: INFO nova.compute.manager [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Took 23.68 seconds to build instance. [ 1705.226015] env[62619]: DEBUG nova.policy [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15bd98bac83146b0899cc8776fece70e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '079ddd8f5dc14fa699b4961995733f95', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1705.264471] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.327662] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "a6ba8114-0261-4894-98c0-9e0360f6d256" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.327913] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "a6ba8114-0261-4894-98c0-9e0360f6d256" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.328227] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "a6ba8114-0261-4894-98c0-9e0360f6d256-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.328431] env[62619]: 
DEBUG oslo_concurrency.lockutils [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "a6ba8114-0261-4894-98c0-9e0360f6d256-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.328600] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "a6ba8114-0261-4894-98c0-9e0360f6d256-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.330812] env[62619]: INFO nova.compute.manager [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Terminating instance [ 1705.392475] env[62619]: DEBUG nova.compute.manager [req-c1d08a57-c818-4625-94da-2f108167a1e3 req-2d44a191-7624-435d-b67d-bdf5dfbeb019 service nova] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Received event network-vif-deleted-f289fb71-1285-4a29-9580-10815cd08cba {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1705.392591] env[62619]: INFO nova.compute.manager [req-c1d08a57-c818-4625-94da-2f108167a1e3 req-2d44a191-7624-435d-b67d-bdf5dfbeb019 service nova] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Neutron deleted interface f289fb71-1285-4a29-9580-10815cd08cba; detaching it from the instance and deleting it from the info cache [ 1705.392773] env[62619]: DEBUG nova.network.neutron [req-c1d08a57-c818-4625-94da-2f108167a1e3 req-2d44a191-7624-435d-b67d-bdf5dfbeb019 service nova] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.531297] env[62619]: DEBUG nova.network.neutron [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Successfully created port: 47138162-63de-413e-8a1d-767355e11190 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1705.582249] env[62619]: DEBUG nova.compute.manager [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1705.588982] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "e302e431-1f95-4ab5-bfca-59450fd887f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.588982] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "e302e431-1f95-4ab5-bfca-59450fd887f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.620065] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Releasing lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.634544] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31067dc3-b42c-4ea2-a4d7-6461ca631a55 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.645344] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0699883-bcc4-480b-8cc0-42c1ca3205ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.679585] env[62619]: DEBUG nova.compute.manager [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1705.684138] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1ec90c-f8c0-46f5-99c1-15dc6e25a172 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.694304] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a83be73-458b-480d-9fba-e5c2418f231a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.710901] env[62619]: DEBUG nova.compute.provider_tree [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1705.712247] env[62619]: DEBUG nova.network.neutron [-] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.713376] env[62619]: DEBUG oslo_concurrency.lockutils [None req-529e2b2e-8ffa-4962-8a3a-aecb7e49c88a tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "34180944-02f7-4115-8178-64f2f2591080" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.196s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.835638] env[62619]: DEBUG nova.compute.manager [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1705.835873] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1705.836904] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0cbee1f-9a9c-487e-81e8-6f190f79b095 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.846337] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1705.846488] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-100ed1d5-faab-406a-b751-b227ab7e22a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.856539] env[62619]: DEBUG oslo_vmware.api [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1705.856539] env[62619]: value = "task-1778074" [ 1705.856539] env[62619]: _type = "Task" [ 1705.856539] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.870559] env[62619]: DEBUG oslo_vmware.api [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1778074, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.895009] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c181b339-3101-41b8-b67f-cd8d4fc1e6c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.907669] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144be8f0-2568-4522-9b9d-a4cd92bb4b67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.947714] env[62619]: DEBUG nova.compute.manager [req-c1d08a57-c818-4625-94da-2f108167a1e3 req-2d44a191-7624-435d-b67d-bdf5dfbeb019 service nova] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Detach interface failed, port_id=f289fb71-1285-4a29-9580-10815cd08cba, reason: Instance cbff225f-2d11-4a43-a320-95dd3afb8e48 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1705.974858] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1705.975797] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63132bb-5c0c-48ed-8039-7810c435fdba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.984107] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1705.984380] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09f87452-8cba-4752-88be-49136c3733da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.091992] env[62619]: DEBUG nova.compute.manager [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1706.094550] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1706.094753] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1706.094933] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Deleting the datastore file [datastore1] 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1706.097440] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ec85af03-a23a-4638-abd7-d52aeacfe3ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.105300] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1706.105300] env[62619]: value = "task-1778076" [ 1706.105300] env[62619]: _type = "Task" [ 1706.105300] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.114146] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778076, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.118491] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.215280] env[62619]: DEBUG nova.scheduler.client.report [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1706.219153] env[62619]: INFO nova.compute.manager [-] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Took 1.43 seconds to deallocate network for instance. [ 1706.378832] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquiring lock "32aed8cd-1583-4253-bfb6-a98610e2f32e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.379330] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Lock "32aed8cd-1583-4253-bfb6-a98610e2f32e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.380650] env[62619]: DEBUG oslo_vmware.api [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1778074, 'name': PowerOffVM_Task, 'duration_secs': 0.350432} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.381424] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1706.381711] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1706.382080] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98878ed7-a6a1-4ca3-a917-96b3a70dc38d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.482021] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1706.482021] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1706.482021] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Deleting the datastore file [datastore1] a6ba8114-0261-4894-98c0-9e0360f6d256 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1706.482021] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc8c5551-701b-4576-9ec5-75bd9c877b9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.490823] env[62619]: DEBUG oslo_vmware.api [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for the task: (returnval){ [ 1706.490823] env[62619]: value = "task-1778078" [ 1706.490823] env[62619]: _type = "Task" [ 1706.490823] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.498117] env[62619]: DEBUG oslo_vmware.api [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1778078, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.617627] env[62619]: DEBUG oslo_vmware.api [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778076, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.266395} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.618882] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.619370] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1706.619900] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1706.620246] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1706.642205] env[62619]: INFO nova.scheduler.client.report [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Deleted allocations for instance 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85 [ 1706.696482] env[62619]: DEBUG nova.compute.manager [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1707.457994] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.803s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.460477] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.461063] env[62619]: DEBUG nova.compute.manager [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1707.467027] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.467027] env[62619]: DEBUG nova.compute.manager [req-fd7e68d9-638c-4d4b-b6a2-49a1e51c34eb req-c69f829c-0792-40cb-905f-c29bf2f14172 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Received event network-vif-plugged-47138162-63de-413e-8a1d-767355e11190 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1707.467027] env[62619]: DEBUG oslo_concurrency.lockutils [req-fd7e68d9-638c-4d4b-b6a2-49a1e51c34eb req-c69f829c-0792-40cb-905f-c29bf2f14172 service nova] Acquiring lock "4858096a-9683-4a7c-bbeb-4e6b2f5401cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.467027] env[62619]: DEBUG oslo_concurrency.lockutils [req-fd7e68d9-638c-4d4b-b6a2-49a1e51c34eb req-c69f829c-0792-40cb-905f-c29bf2f14172 service nova] Lock "4858096a-9683-4a7c-bbeb-4e6b2f5401cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.467027] env[62619]: DEBUG oslo_concurrency.lockutils [req-fd7e68d9-638c-4d4b-b6a2-49a1e51c34eb req-c69f829c-0792-40cb-905f-c29bf2f14172 service nova] Lock "4858096a-9683-4a7c-bbeb-4e6b2f5401cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.467027] env[62619]: DEBUG nova.compute.manager [req-fd7e68d9-638c-4d4b-b6a2-49a1e51c34eb req-c69f829c-0792-40cb-905f-c29bf2f14172 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] No waiting events found dispatching 
network-vif-plugged-47138162-63de-413e-8a1d-767355e11190 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1707.467027] env[62619]: WARNING nova.compute.manager [req-fd7e68d9-638c-4d4b-b6a2-49a1e51c34eb req-c69f829c-0792-40cb-905f-c29bf2f14172 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Received unexpected event network-vif-plugged-47138162-63de-413e-8a1d-767355e11190 for instance with vm_state building and task_state spawning. [ 1707.467027] env[62619]: DEBUG nova.network.neutron [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Successfully updated port: 47138162-63de-413e-8a1d-767355e11190 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1707.469708] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.715s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.471102] env[62619]: INFO nova.compute.claims [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1707.479837] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "34180944-02f7-4115-8178-64f2f2591080" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.479837] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "34180944-02f7-4115-8178-64f2f2591080" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.487760] env[62619]: DEBUG oslo_vmware.api [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Task: {'id': task-1778078, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.320417} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.490704] env[62619]: DEBUG nova.virt.hardware [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1707.490978] env[62619]: DEBUG nova.virt.hardware [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1707.491201] env[62619]: DEBUG nova.virt.hardware [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1707.491425] env[62619]: DEBUG nova.virt.hardware [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1707.491608] env[62619]: DEBUG nova.virt.hardware [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1707.491788] env[62619]: DEBUG nova.virt.hardware [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1707.492046] env[62619]: DEBUG nova.virt.hardware [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1707.492247] env[62619]: DEBUG nova.virt.hardware [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1707.492450] env[62619]: DEBUG nova.virt.hardware [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1707.492645] env[62619]: DEBUG nova.virt.hardware [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1707.492851] env[62619]: DEBUG nova.virt.hardware [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1707.493189] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1707.493400] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1707.493605] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1707.493814] env[62619]: INFO nova.compute.manager [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1707.494093] env[62619]: DEBUG oslo.service.loopingcall [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1707.495154] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dd577a-bf11-493c-94b6-5a99cdfc4857 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.497796] env[62619]: DEBUG nova.compute.manager [-] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1707.497934] env[62619]: DEBUG nova.network.neutron [-] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1707.508151] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643b7750-8470-4d0c-91e1-35f28880a9b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.512454] env[62619]: INFO nova.scheduler.client.report [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Deleted allocations for instance b3d9c418-f521-4770-a381-5238be6cc33c [ 1707.637251] env[62619]: DEBUG nova.compute.manager [req-92c5b3a3-246e-4aa0-b71a-364774f03caa req-3f0d2004-85bd-4495-96c7-57054fd7be91 service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Received event network-vif-unplugged-a10c5399-b021-4ea7-8a41-4d58136aff12 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1707.637458] env[62619]: DEBUG oslo_concurrency.lockutils [req-92c5b3a3-246e-4aa0-b71a-364774f03caa req-3f0d2004-85bd-4495-96c7-57054fd7be91 service nova] Acquiring lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.637703] env[62619]: DEBUG oslo_concurrency.lockutils [req-92c5b3a3-246e-4aa0-b71a-364774f03caa req-3f0d2004-85bd-4495-96c7-57054fd7be91 service nova] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.638734] env[62619]: DEBUG oslo_concurrency.lockutils [req-92c5b3a3-246e-4aa0-b71a-364774f03caa req-3f0d2004-85bd-4495-96c7-57054fd7be91 service nova] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.638734] env[62619]: DEBUG nova.compute.manager [req-92c5b3a3-246e-4aa0-b71a-364774f03caa req-3f0d2004-85bd-4495-96c7-57054fd7be91 service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] No waiting events found dispatching network-vif-unplugged-a10c5399-b021-4ea7-8a41-4d58136aff12 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1707.639048] env[62619]: WARNING nova.compute.manager [req-92c5b3a3-246e-4aa0-b71a-364774f03caa req-3f0d2004-85bd-4495-96c7-57054fd7be91 service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Received unexpected event 
network-vif-unplugged-a10c5399-b021-4ea7-8a41-4d58136aff12 for instance with vm_state shelved_offloaded and task_state None. [ 1707.639308] env[62619]: DEBUG nova.compute.manager [req-92c5b3a3-246e-4aa0-b71a-364774f03caa req-3f0d2004-85bd-4495-96c7-57054fd7be91 service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Received event network-changed-a10c5399-b021-4ea7-8a41-4d58136aff12 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1707.639393] env[62619]: DEBUG nova.compute.manager [req-92c5b3a3-246e-4aa0-b71a-364774f03caa req-3f0d2004-85bd-4495-96c7-57054fd7be91 service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Refreshing instance network info cache due to event network-changed-a10c5399-b021-4ea7-8a41-4d58136aff12. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1707.639534] env[62619]: DEBUG oslo_concurrency.lockutils [req-92c5b3a3-246e-4aa0-b71a-364774f03caa req-3f0d2004-85bd-4495-96c7-57054fd7be91 service nova] Acquiring lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.639673] env[62619]: DEBUG oslo_concurrency.lockutils [req-92c5b3a3-246e-4aa0-b71a-364774f03caa req-3f0d2004-85bd-4495-96c7-57054fd7be91 service nova] Acquired lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.639830] env[62619]: DEBUG nova.network.neutron [req-92c5b3a3-246e-4aa0-b71a-364774f03caa req-3f0d2004-85bd-4495-96c7-57054fd7be91 service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Refreshing network info cache for port a10c5399-b021-4ea7-8a41-4d58136aff12 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1707.978581] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.978903] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquired lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.978945] env[62619]: DEBUG nova.network.neutron [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1707.983138] env[62619]: DEBUG nova.compute.utils [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1707.985946] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 
tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1708.027773] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1827cec3-519e-45f6-b7de-94b45cd4f95d tempest-ImagesTestJSON-37875623 tempest-ImagesTestJSON-37875623-project-member] Lock "b3d9c418-f521-4770-a381-5238be6cc33c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.876s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1708.345885] env[62619]: DEBUG nova.network.neutron [req-92c5b3a3-246e-4aa0-b71a-364774f03caa req-3f0d2004-85bd-4495-96c7-57054fd7be91 service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Updated VIF entry in instance network info cache for port a10c5399-b021-4ea7-8a41-4d58136aff12. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1708.346302] env[62619]: DEBUG nova.network.neutron [req-92c5b3a3-246e-4aa0-b71a-364774f03caa req-3f0d2004-85bd-4495-96c7-57054fd7be91 service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Updating instance_info_cache with network_info: [{"id": "a10c5399-b021-4ea7-8a41-4d58136aff12", "address": "fa:16:3e:14:4c:3d", "network": {"id": "7e652693-afc0-4c29-9a04-cad63acf109b", "bridge": null, "label": "tempest-ServersNegativeTestJSON-107922130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf557954d79d4bb1939f6e65d4ed00b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapa10c5399-b0", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1708.449971] env[62619]: DEBUG nova.network.neutron [-] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1708.487186] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "34180944-02f7-4115-8178-64f2f2591080" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1708.523473] env[62619]: DEBUG nova.network.neutron [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Instance cache missing network info. 
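The get_next_device_name record above ("Using /dev/sd instead of None") shows the compute manager falling back to the /dev/sd prefix when reserving a device name for a volume attachment; a later record in this section attaches the volume at /dev/sdb. A rough sketch of that kind of next-free-name selection, under the assumption that names are handed out alphabetically after the ones already in use; the helper below is hypothetical, not Nova's code:

    import string

    def next_device_name(used, prefix="/dev/sd"):
        # Walk /dev/sda, /dev/sdb, ... and return the first name not yet in use.
        for letter in string.ascii_lowercase:
            candidate = prefix + letter
            if candidate not in used:
                return candidate
        raise ValueError("no free device names under %s" % prefix)

    # The boot disk typically occupies /dev/sda, so the first data volume
    # lands on /dev/sdb, matching the "Attaching volume ... to /dev/sdb" record.
    print(next_device_name({"/dev/sda"}))   # /dev/sdb
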
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1708.740277] env[62619]: DEBUG nova.network.neutron [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Updating instance_info_cache with network_info: [{"id": "47138162-63de-413e-8a1d-767355e11190", "address": "fa:16:3e:02:cd:09", "network": {"id": "014f0330-ae80-41d1-8155-a6a0fbf47197", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1852196471-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "079ddd8f5dc14fa699b4961995733f95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47138162-63", "ovs_interfaceid": "47138162-63de-413e-8a1d-767355e11190", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1708.803109] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b9c3c9-3782-4380-a69a-d370b955266f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.814855] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6fd207-bc58-4385-b82b-2d63db36dbb1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.847892] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de3dadb-c8fb-4056-bb1f-755d6a443d74 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.851697] env[62619]: DEBUG oslo_concurrency.lockutils [req-92c5b3a3-246e-4aa0-b71a-364774f03caa req-3f0d2004-85bd-4495-96c7-57054fd7be91 service nova] Releasing lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1708.859467] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e36e4d0-4095-483d-b2f1-601ecc78661e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.875355] env[62619]: DEBUG nova.compute.provider_tree [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1708.952699] env[62619]: INFO nova.compute.manager [-] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Took 1.45 seconds to deallocate network for instance. [ 1709.246477] env[62619]: DEBUG nova.compute.manager [req-74bd6f3f-38ac-449d-90d4-8e2041e39a12 req-a25ed4b0-0f15-47f0-adc6-672071a4fc5b service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Received event network-changed-47138162-63de-413e-8a1d-767355e11190 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1709.246477] env[62619]: DEBUG nova.compute.manager [req-74bd6f3f-38ac-449d-90d4-8e2041e39a12 req-a25ed4b0-0f15-47f0-adc6-672071a4fc5b service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Refreshing instance network info cache due to event network-changed-47138162-63de-413e-8a1d-767355e11190. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1709.246477] env[62619]: DEBUG oslo_concurrency.lockutils [req-74bd6f3f-38ac-449d-90d4-8e2041e39a12 req-a25ed4b0-0f15-47f0-adc6-672071a4fc5b service nova] Acquiring lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1709.246477] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Releasing lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1709.246477] env[62619]: DEBUG nova.compute.manager [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Instance network_info: |[{"id": "47138162-63de-413e-8a1d-767355e11190", "address": "fa:16:3e:02:cd:09", "network": {"id": "014f0330-ae80-41d1-8155-a6a0fbf47197", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1852196471-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "079ddd8f5dc14fa699b4961995733f95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47138162-63", "ovs_interfaceid": "47138162-63de-413e-8a1d-767355e11190", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1709.246477] env[62619]: DEBUG oslo_concurrency.lockutils 
[req-74bd6f3f-38ac-449d-90d4-8e2041e39a12 req-a25ed4b0-0f15-47f0-adc6-672071a4fc5b service nova] Acquired lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1709.246935] env[62619]: DEBUG nova.network.neutron [req-74bd6f3f-38ac-449d-90d4-8e2041e39a12 req-a25ed4b0-0f15-47f0-adc6-672071a4fc5b service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Refreshing network info cache for port 47138162-63de-413e-8a1d-767355e11190 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1709.248033] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:cd:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '678ebbe4-4c53-4eaf-a689-93981310f37d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47138162-63de-413e-8a1d-767355e11190', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1709.255211] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Creating folder: Project (079ddd8f5dc14fa699b4961995733f95). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1709.256688] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e41bcc84-433d-4a7e-b353-4d7fcb9a6b1b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.273151] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Created folder: Project (079ddd8f5dc14fa699b4961995733f95) in parent group-v368875. [ 1709.273151] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Creating folder: Instances. Parent ref: group-v369100. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1709.273151] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa8d2429-2102-4e68-af1f-b3ff1b7d21d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.290794] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Created folder: Instances in parent group-v369100. [ 1709.291059] env[62619]: DEBUG oslo.service.loopingcall [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1709.291305] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1709.291467] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20a9a44e-234f-41b4-aa42-a7d1119b6675 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.312118] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1709.312118] env[62619]: value = "task-1778081" [ 1709.312118] env[62619]: _type = "Task" [ 1709.312118] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.315589] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "34180944-02f7-4115-8178-64f2f2591080" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.315829] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "34180944-02f7-4115-8178-64f2f2591080" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.316068] env[62619]: INFO nova.compute.manager [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Attaching volume 9efe1269-d4a0-4383-8573-f7d5886afd9f to /dev/sdb [ 1709.323200] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778081, 'name': CreateVM_Task} progress is 0%. 
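The CreateVM_Task records around here follow the usual oslo.vmware pattern: submit an asynchronous vCenter task, then poll its state on an interval until it reports success or failure. A self-contained sketch of that poll loop, with poll() as an injected stand-in rather than the real oslo.vmware API:

    import time

    class TaskError(Exception):
        pass

    def wait_for_task(poll, task_id, interval=0.5, timeout=300):
        # poll(task_id) must return (state, progress, error), with state one of
        # 'queued', 'running', 'success', 'error'. This is a generic sketch,
        # not oslo_vmware.api.VMwareAPISession.wait_for_task.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress, error = poll(task_id)
            print("Task %s: state=%s progress=%s%%" % (task_id, state, progress))
            if state == "success":
                return
            if state == "error":
                raise TaskError(error)
            time.sleep(interval)
        raise TaskError("timed out waiting for task %s" % task_id)

    # Toy usage with a fake poller that reports progress and then success.
    states = iter([("running", 0, None), ("running", 50, None), ("success", 100, None)])
    wait_for_task(lambda task_id: next(states), "task-1778081", interval=0)
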
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.363391] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0710dff-e773-4a2c-8bda-75c7e20efe3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.369857] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66cf86b8-0855-4a17-940f-8a6deed38d42 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.393282] env[62619]: DEBUG nova.virt.block_device [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Updating existing volume attachment record: 0d80e251-36e6-4106-abf7-8d8f7e4c71ea {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1709.398972] env[62619]: ERROR nova.scheduler.client.report [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [req-2f1d0d5b-04b4-4a50-8c0e-da82e4988710] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2f1d0d5b-04b4-4a50-8c0e-da82e4988710"}]} [ 1709.418300] env[62619]: DEBUG nova.scheduler.client.report [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1709.441639] env[62619]: DEBUG nova.scheduler.client.report [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1709.441639] env[62619]: DEBUG nova.compute.provider_tree [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1709.454417] env[62619]: DEBUG nova.scheduler.client.report [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1709.459905] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.479550] env[62619]: DEBUG nova.scheduler.client.report [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1709.567231] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.799508] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591b8ef1-aea7-40dd-904a-d1391b1bcc68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.812957] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-017af881-21bb-474d-ac1e-d1ebf22152b3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.828981] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778081, 'name': CreateVM_Task, 'duration_secs': 0.392482} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.865188] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1709.866604] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1709.866840] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1709.867204] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1709.868134] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1931f336-4e79-4df5-aace-e75e58e44935 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.871144] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f133b1dd-a467-45d2-ab6e-6ac7dda4ce98 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.881062] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1709.881062] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528157ca-a068-f591-b3a2-fd7a6299d066" [ 1709.881062] env[62619]: _type = "Task" [ 1709.881062] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.885377] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b60bc7-65dc-45cf-b6d4-96902e74d2e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.896096] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528157ca-a068-f591-b3a2-fd7a6299d066, 'name': SearchDatastore_Task, 'duration_secs': 0.013362} completed successfully. 
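The Acquiring/Acquired/Releasing lock records around devstack-image-cache_base above reflect a cache-population pattern: take a lock named after the cache entry, re-check whether the cached image is already present, and fetch it only if it is still missing. A simplified stand-in using plain threading primitives (Nova itself uses oslo.concurrency named locks; the exists/fetch hooks below are hypothetical):

    import threading
    from collections import defaultdict

    _locks = defaultdict(threading.Lock)   # one lock per cache entry (simplified)

    def ensure_cached(cache_key, exists, fetch):
        # exists(key) -> bool and fetch(key) -> None are caller-supplied callables.
        with _locks[cache_key]:        # serialize all work on this cache entry
            if not exists(cache_key):  # someone may have populated it while we waited
                fetch(cache_key)

    # Example: only the first caller for a given image actually fetches it.
    cache = set()
    ensure_cached("devstack-image-cache_base/<image-id>.vmdk",
                  exists=cache.__contains__, fetch=cache.add)
    print(cache)
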
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.904850] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1709.905086] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1709.905389] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1709.905603] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1709.905826] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1709.906438] env[62619]: DEBUG nova.compute.provider_tree [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1709.907946] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df8469fc-9c4f-4ab9-847e-c1c0f28e0a8e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.920104] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1709.920647] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1709.921036] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b7a31b8-4112-49a2-bda9-43fecddf21d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.927195] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1709.927195] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f2f7e6-0086-97fe-5534-5411d0e2a9bc" [ 1709.927195] env[62619]: _type = "Task" [ 1709.927195] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.935809] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f2f7e6-0086-97fe-5534-5411d0e2a9bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.159095] env[62619]: DEBUG nova.network.neutron [req-74bd6f3f-38ac-449d-90d4-8e2041e39a12 req-a25ed4b0-0f15-47f0-adc6-672071a4fc5b service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Updated VIF entry in instance network info cache for port 47138162-63de-413e-8a1d-767355e11190. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1710.159491] env[62619]: DEBUG nova.network.neutron [req-74bd6f3f-38ac-449d-90d4-8e2041e39a12 req-a25ed4b0-0f15-47f0-adc6-672071a4fc5b service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Updating instance_info_cache with network_info: [{"id": "47138162-63de-413e-8a1d-767355e11190", "address": "fa:16:3e:02:cd:09", "network": {"id": "014f0330-ae80-41d1-8155-a6a0fbf47197", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1852196471-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "079ddd8f5dc14fa699b4961995733f95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47138162-63", "ovs_interfaceid": "47138162-63de-413e-8a1d-767355e11190", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1710.440669] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f2f7e6-0086-97fe-5534-5411d0e2a9bc, 'name': SearchDatastore_Task, 'duration_secs': 0.032886} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.441034] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a66b0a1d-5af8-4a61-8efa-984baf920737 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.447902] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1710.447902] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524ab6c1-91c0-6bd6-44e3-6af1d5c14882" [ 1710.447902] env[62619]: _type = "Task" [ 1710.447902] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.462038] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524ab6c1-91c0-6bd6-44e3-6af1d5c14882, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.463310] env[62619]: DEBUG nova.scheduler.client.report [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 119 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1710.463965] env[62619]: DEBUG nova.compute.provider_tree [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 119 to 120 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1710.467788] env[62619]: DEBUG nova.compute.provider_tree [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1710.663585] env[62619]: DEBUG oslo_concurrency.lockutils [req-74bd6f3f-38ac-449d-90d4-8e2041e39a12 req-a25ed4b0-0f15-47f0-adc6-672071a4fc5b service nova] Releasing lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1710.663891] env[62619]: DEBUG nova.compute.manager [req-74bd6f3f-38ac-449d-90d4-8e2041e39a12 req-a25ed4b0-0f15-47f0-adc6-672071a4fc5b service nova] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Received event network-vif-deleted-fac1bf3e-4c83-47cb-9a34-2a16035eb800 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1710.959854] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524ab6c1-91c0-6bd6-44e3-6af1d5c14882, 'name': SearchDatastore_Task, 'duration_secs': 0.013064} completed successfully. 
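The 409 placement.concurrent_update error earlier in this section, followed by the inventory refresh and the generation 119 -> 120 update above, is Placement's optimistic-concurrency handshake: every inventory PUT carries the resource provider generation the writer last saw, and a conflict means another writer got there first, so the client re-reads the generation and retries. A sketch of that retry loop, with put_inventory/get_generation as hypothetical callables standing in for the Placement client:

    class ConflictError(Exception):
        """Stand-in for a 409 placement.concurrent_update response."""

    def set_inventory_with_retry(put_inventory, get_generation, inventory, retries=3):
        generation = get_generation()
        for _ in range(retries):
            try:
                # put_inventory returns the provider's new generation on success.
                return put_inventory(inventory, generation)
            except ConflictError:
                # Another writer bumped the provider generation; re-read and retry.
                generation = get_generation()
        raise ConflictError("still conflicting after %d attempts" % retries)

    # Toy demo: the first PUT races with another writer, the retry succeeds.
    provider = {"generation": 119, "raced": True}

    def get_generation():
        return provider["generation"]

    def put_inventory(inventory, generation):
        if provider["raced"]:
            provider["raced"] = False
            provider["generation"] += 1    # the competing writer's update
            raise ConflictError()
        provider["generation"] += 1
        return provider["generation"]

    print(set_inventory_with_retry(put_inventory, get_generation, {"VCPU": 48}))
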
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.960129] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1710.960379] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4858096a-9683-4a7c-bbeb-4e6b2f5401cf/4858096a-9683-4a7c-bbeb-4e6b2f5401cf.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1710.961050] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a0252798-32c9-4ceb-ad0f-f5392e3d7db1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.970139] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1710.970139] env[62619]: value = "task-1778085" [ 1710.970139] env[62619]: _type = "Task" [ 1710.970139] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.971081] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.501s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.971476] env[62619]: DEBUG nova.compute.manager [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1710.981131] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.030s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1710.982594] env[62619]: INFO nova.compute.claims [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1710.991632] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778085, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.476469] env[62619]: DEBUG nova.compute.utils [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1711.484787] env[62619]: DEBUG nova.compute.manager [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1711.485074] env[62619]: DEBUG nova.network.neutron [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1711.487748] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778085, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.550742] env[62619]: DEBUG nova.policy [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9f08e93082004ef9b726e62a39d3277c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4fe880c492946f397195ffc5bcd566e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1711.871169] env[62619]: DEBUG nova.network.neutron [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Successfully created port: ac59f3e2-0841-445b-8907-932fdd3f4d2c {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1711.982880] env[62619]: DEBUG nova.compute.manager [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1711.988230] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778085, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.835242} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.988685] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4858096a-9683-4a7c-bbeb-4e6b2f5401cf/4858096a-9683-4a7c-bbeb-4e6b2f5401cf.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1711.988924] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1711.989212] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a4fb7577-509f-41f5-b4bd-ebc61d40b231 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.002919] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1712.002919] env[62619]: value = "task-1778087" [ 1712.002919] env[62619]: _type = "Task" [ 1712.002919] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.013807] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778087, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.262407] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e54b537-6f6f-4eb7-93fa-994164a9fb8d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.270904] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ce225a-b4c7-4595-b2ef-858ebd586376 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.302509] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcdb6154-e326-4085-9963-e0f6d4f4112f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.310733] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b414e6-ea99-4c67-8c77-07419183830e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.324238] env[62619]: DEBUG nova.compute.provider_tree [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1712.490686] env[62619]: INFO nova.virt.block_device [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Booting with volume cd357a5b-17b3-4318-9f18-e8e95fb8d9d9 at /dev/sda [ 1712.513013] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778087, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.17619} completed successfully. 
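The task records above and just below trace the root-disk preparation for instance 4858096a-9683-4a7c-bbeb-4e6b2f5401cf: copy the cached image VMDK into the instance directory (CopyVirtualDisk_Task), extend it to the flavor's root size (ExtendVirtualDisk_Task; the logged 1048576 looks like a size in KB, i.e. roughly 1 GiB, though that unit is an assumption here), and then reconfigure the VM to attach the disk (ReconfigVM_Task). A high-level sketch of that sequence with injected callables standing in for the vCenter tasks; none of this is the actual driver code:

    def prepare_root_disk(copy_disk, extend_disk, attach_disk,
                          cached_vmdk, instance_vmdk, root_size_kb):
        copy_disk(cached_vmdk, instance_vmdk)      # CopyVirtualDisk_Task
        extend_disk(instance_vmdk, root_size_kb)   # ExtendVirtualDisk_Task
        attach_disk(instance_vmdk)                 # ReconfigVM_Task (attach as root disk)

    # Hypothetical paths and size; the real values come from the image cache
    # entry and the flavor.
    prepare_root_disk(
        copy_disk=lambda src, dst: print("copy", src, "->", dst),
        extend_disk=lambda path, kb: print("extend", path, "to", kb, "KB"),
        attach_disk=lambda path: print("attach", path),
        cached_vmdk="[datastore1] devstack-image-cache_base/<image-id>.vmdk",
        instance_vmdk="[datastore1] <instance-uuid>/<instance-uuid>.vmdk",
        root_size_kb=1048576,
    )
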
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.513179] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1712.513958] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1b81b3-a65b-4df9-a060-ad6142db6f8b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.536339] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 4858096a-9683-4a7c-bbeb-4e6b2f5401cf/4858096a-9683-4a7c-bbeb-4e6b2f5401cf.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1712.537521] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8fac645b-4769-4d35-899e-a211c39972e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.551132] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-940249fc-2952-44d2-903c-cac3a1ac0cc1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.562554] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54be8aa5-5423-456b-8b22-91288defdb57 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.572674] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1712.572674] env[62619]: value = "task-1778088" [ 1712.572674] env[62619]: _type = "Task" [ 1712.572674] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.581533] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778088, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.594157] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c4ab8e26-8356-40a0-9a84-85a3ceb1e35d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.603087] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0142f034-9310-4983-bda0-e54abe9aeea8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.635710] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319cca45-6bda-4c99-a79c-f879cb5ffcd3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.643017] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162b539c-3d99-4b20-adda-e60c31f5b880 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.658420] env[62619]: DEBUG nova.virt.block_device [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Updating existing volume attachment record: 87661a6b-84d2-431d-97fa-c060d60c105d {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1712.858016] env[62619]: DEBUG nova.scheduler.client.report [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 120 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1712.858496] env[62619]: DEBUG nova.compute.provider_tree [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 120 to 121 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1712.858605] env[62619]: DEBUG nova.compute.provider_tree [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
1713.083204] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778088, 'name': ReconfigVM_Task, 'duration_secs': 0.268321} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.083495] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 4858096a-9683-4a7c-bbeb-4e6b2f5401cf/4858096a-9683-4a7c-bbeb-4e6b2f5401cf.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1713.084166] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ce3858b-80ba-45cb-8009-cab7b37cd20a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.090970] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1713.090970] env[62619]: value = "task-1778089" [ 1713.090970] env[62619]: _type = "Task" [ 1713.090970] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.099618] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778089, 'name': Rename_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.336565] env[62619]: DEBUG nova.compute.manager [req-5d4d9f2b-8920-4001-a841-44961c95f1d2 req-fe4e767b-0454-4f30-8391-2801c3a1c017 service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Received event network-vif-plugged-ac59f3e2-0841-445b-8907-932fdd3f4d2c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1713.336908] env[62619]: DEBUG oslo_concurrency.lockutils [req-5d4d9f2b-8920-4001-a841-44961c95f1d2 req-fe4e767b-0454-4f30-8391-2801c3a1c017 service nova] Acquiring lock "f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.337023] env[62619]: DEBUG oslo_concurrency.lockutils [req-5d4d9f2b-8920-4001-a841-44961c95f1d2 req-fe4e767b-0454-4f30-8391-2801c3a1c017 service nova] Lock "f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.337156] env[62619]: DEBUG oslo_concurrency.lockutils [req-5d4d9f2b-8920-4001-a841-44961c95f1d2 req-fe4e767b-0454-4f30-8391-2801c3a1c017 service nova] Lock "f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.337341] env[62619]: DEBUG nova.compute.manager [req-5d4d9f2b-8920-4001-a841-44961c95f1d2 req-fe4e767b-0454-4f30-8391-2801c3a1c017 service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] No waiting events found dispatching network-vif-plugged-ac59f3e2-0841-445b-8907-932fdd3f4d2c {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1713.337475] env[62619]: WARNING nova.compute.manager [req-5d4d9f2b-8920-4001-a841-44961c95f1d2 req-fe4e767b-0454-4f30-8391-2801c3a1c017 service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Received unexpected event network-vif-plugged-ac59f3e2-0841-445b-8907-932fdd3f4d2c for instance with vm_state building and task_state block_device_mapping. [ 1713.364038] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.384s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.364590] env[62619]: DEBUG nova.compute.manager [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1713.367431] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.276s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.368801] env[62619]: INFO nova.compute.claims [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1713.435364] env[62619]: DEBUG nova.network.neutron [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Successfully updated port: ac59f3e2-0841-445b-8907-932fdd3f4d2c {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1713.601954] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778089, 'name': Rename_Task, 'duration_secs': 0.153748} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.602309] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1713.602508] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7bd62069-31c5-45ec-adda-4492d2913f2f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.609840] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1713.609840] env[62619]: value = "task-1778090" [ 1713.609840] env[62619]: _type = "Task" [ 1713.609840] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.620079] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778090, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.742974] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "11869077-b428-413f-9f8f-7eac08d2d9ec" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.743149] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "11869077-b428-413f-9f8f-7eac08d2d9ec" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.874700] env[62619]: DEBUG nova.compute.utils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1713.879786] env[62619]: DEBUG nova.compute.manager [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1713.879997] env[62619]: DEBUG nova.network.neutron [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1713.937330] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Acquiring lock "refresh_cache-f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1713.937473] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Acquired lock "refresh_cache-f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1713.937619] env[62619]: DEBUG nova.network.neutron [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1713.944346] env[62619]: DEBUG nova.policy [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe6ee1c8d8ef4b718274da7be4f5fd01', 'user_domain_id': 
'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c82fb42e93ff479b971f49eb92f50832', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1714.128533] env[62619]: DEBUG oslo_vmware.api [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778090, 'name': PowerOnVM_Task, 'duration_secs': 0.478752} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.130370] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1714.130650] env[62619]: INFO nova.compute.manager [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Took 7.44 seconds to spawn the instance on the hypervisor. [ 1714.130899] env[62619]: DEBUG nova.compute.manager [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1714.131765] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae374b4-1386-4771-98d3-f9dca82796f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.247635] env[62619]: DEBUG nova.compute.utils [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1714.256828] env[62619]: DEBUG nova.network.neutron [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Successfully created port: 5911524f-a8b5-4591-a312-ea0cefac24df {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1714.387571] env[62619]: DEBUG nova.compute.manager [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1714.456156] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Volume attach. 
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1714.456156] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369104', 'volume_id': '9efe1269-d4a0-4383-8573-f7d5886afd9f', 'name': 'volume-9efe1269-d4a0-4383-8573-f7d5886afd9f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '34180944-02f7-4115-8178-64f2f2591080', 'attached_at': '', 'detached_at': '', 'volume_id': '9efe1269-d4a0-4383-8573-f7d5886afd9f', 'serial': '9efe1269-d4a0-4383-8573-f7d5886afd9f'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1714.456731] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6d798a-9f26-4226-9329-bde49663986f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.480357] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8a12d3-b22e-46f4-bc2c-0fe8cc20c0f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.509122] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] volume-9efe1269-d4a0-4383-8573-f7d5886afd9f/volume-9efe1269-d4a0-4383-8573-f7d5886afd9f.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1714.513470] env[62619]: DEBUG nova.network.neutron [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1714.514929] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7595f8d5-a82c-473b-8c03-b67e5dfc1821 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.536926] env[62619]: DEBUG oslo_vmware.api [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1714.536926] env[62619]: value = "task-1778091" [ 1714.536926] env[62619]: _type = "Task" [ 1714.536926] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.552457] env[62619]: DEBUG oslo_vmware.api [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778091, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.660061] env[62619]: INFO nova.compute.manager [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Took 17.88 seconds to build instance. [ 1714.743072] env[62619]: DEBUG nova.network.neutron [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Updating instance_info_cache with network_info: [{"id": "ac59f3e2-0841-445b-8907-932fdd3f4d2c", "address": "fa:16:3e:11:2c:88", "network": {"id": "cc0cf77c-033d-4cc4-b7f8-b832204e998a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1661750215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4fe880c492946f397195ffc5bcd566e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac59f3e2-08", "ovs_interfaceid": "ac59f3e2-0841-445b-8907-932fdd3f4d2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1714.752126] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "11869077-b428-413f-9f8f-7eac08d2d9ec" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.761132] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40dd2bd-a26e-4068-a168-2e51ac01df24 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.769871] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c5b72d-b9fc-4070-bb94-a7d11465599b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.776121] env[62619]: DEBUG nova.compute.manager [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1714.776797] env[62619]: DEBUG nova.virt.hardware [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1714.776925] env[62619]: DEBUG nova.virt.hardware [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1714.777058] env[62619]: DEBUG nova.virt.hardware [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1714.777510] env[62619]: DEBUG nova.virt.hardware [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1714.777510] env[62619]: DEBUG nova.virt.hardware [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1714.777510] env[62619]: DEBUG nova.virt.hardware [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1714.777709] env[62619]: DEBUG nova.virt.hardware [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1714.777868] env[62619]: DEBUG nova.virt.hardware [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1714.778019] env[62619]: DEBUG nova.virt.hardware [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Got 1 
possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1714.778185] env[62619]: DEBUG nova.virt.hardware [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1714.778352] env[62619]: DEBUG nova.virt.hardware [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1714.779146] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1351e10-1a15-445c-8d57-86e313005ce7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.812715] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389375c8-d07a-41f0-9b16-89fa87735355 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.817972] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3045b8b8-6b05-4114-8863-78032e472a36 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.833788] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcdc64ca-8d66-4f1d-bd2e-421f62420f27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.848371] env[62619]: DEBUG nova.compute.provider_tree [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1715.049592] env[62619]: DEBUG oslo_vmware.api [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778091, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.163133] env[62619]: DEBUG oslo_concurrency.lockutils [None req-01816882-91aa-465d-9c0f-6f061353f109 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "4858096a-9683-4a7c-bbeb-4e6b2f5401cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.395s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.248045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Releasing lock "refresh_cache-f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1715.248591] env[62619]: DEBUG nova.compute.manager [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Instance network_info: |[{"id": "ac59f3e2-0841-445b-8907-932fdd3f4d2c", "address": "fa:16:3e:11:2c:88", "network": {"id": "cc0cf77c-033d-4cc4-b7f8-b832204e998a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1661750215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4fe880c492946f397195ffc5bcd566e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac59f3e2-08", "ovs_interfaceid": "ac59f3e2-0841-445b-8907-932fdd3f4d2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1715.249216] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:2c:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56834f67-27a8-43dc-bbc6-a74aaa08959b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac59f3e2-0841-445b-8907-932fdd3f4d2c', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1715.257644] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Creating folder: Project (f4fe880c492946f397195ffc5bcd566e). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1715.257966] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38d34642-c9cd-4eb0-8dff-80f963b7ce5f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.273100] env[62619]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1715.273297] env[62619]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62619) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1715.273674] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Folder already exists: Project (f4fe880c492946f397195ffc5bcd566e). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1715.273865] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Creating folder: Instances. Parent ref: group-v369086. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1715.274129] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5aa863d-6c1e-466d-9b15-8009e9fc8b45 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.285835] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Created folder: Instances in parent group-v369086. [ 1715.286103] env[62619]: DEBUG oslo.service.loopingcall [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1715.286306] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1715.286855] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70615c68-adff-4b63-8fb5-8887299e6abd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.310273] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1715.310273] env[62619]: value = "task-1778094" [ 1715.310273] env[62619]: _type = "Task" [ 1715.310273] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.319385] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778094, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.352479] env[62619]: DEBUG nova.scheduler.client.report [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1715.399182] env[62619]: DEBUG nova.compute.manager [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1715.439242] env[62619]: DEBUG nova.virt.hardware [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1715.439242] env[62619]: DEBUG nova.virt.hardware [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1715.439242] env[62619]: DEBUG nova.virt.hardware [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1715.439242] env[62619]: DEBUG nova.virt.hardware [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1715.439242] env[62619]: DEBUG nova.virt.hardware [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
1715.439242] env[62619]: DEBUG nova.virt.hardware [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1715.439242] env[62619]: DEBUG nova.virt.hardware [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1715.439242] env[62619]: DEBUG nova.virt.hardware [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1715.440621] env[62619]: DEBUG nova.virt.hardware [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1715.440825] env[62619]: DEBUG nova.virt.hardware [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1715.441018] env[62619]: DEBUG nova.virt.hardware [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1715.445397] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e75c751-872d-4996-b5d5-68d9dde720d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.453809] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfd9e26-9cab-4668-abaa-d4cea6ef6432 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.549256] env[62619]: DEBUG oslo_vmware.api [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778091, 'name': ReconfigVM_Task, 'duration_secs': 0.679069} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.549711] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Reconfigured VM instance instance-00000051 to attach disk [datastore1] volume-9efe1269-d4a0-4383-8573-f7d5886afd9f/volume-9efe1269-d4a0-4383-8573-f7d5886afd9f.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1715.557265] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-617ec836-345d-4c7c-99a1-e521bc554cc4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.580165] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Acquiring lock "f3345332-5a22-4a1c-ac74-4e8f2ceb3f15" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.580969] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Lock "f3345332-5a22-4a1c-ac74-4e8f2ceb3f15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.590187] env[62619]: DEBUG oslo_vmware.api [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1715.590187] env[62619]: value = "task-1778095" [ 1715.590187] env[62619]: _type = "Task" [ 1715.590187] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.604183] env[62619]: DEBUG oslo_vmware.api [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778095, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.618029] env[62619]: DEBUG nova.compute.manager [req-3b0c9d46-b4f7-42a9-89c3-d2fd74dab387 req-0405223b-965d-4484-833d-db28bdc53d60 service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Received event network-changed-ac59f3e2-0841-445b-8907-932fdd3f4d2c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1715.618297] env[62619]: DEBUG nova.compute.manager [req-3b0c9d46-b4f7-42a9-89c3-d2fd74dab387 req-0405223b-965d-4484-833d-db28bdc53d60 service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Refreshing instance network info cache due to event network-changed-ac59f3e2-0841-445b-8907-932fdd3f4d2c. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1715.619038] env[62619]: DEBUG oslo_concurrency.lockutils [req-3b0c9d46-b4f7-42a9-89c3-d2fd74dab387 req-0405223b-965d-4484-833d-db28bdc53d60 service nova] Acquiring lock "refresh_cache-f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1715.619162] env[62619]: DEBUG oslo_concurrency.lockutils [req-3b0c9d46-b4f7-42a9-89c3-d2fd74dab387 req-0405223b-965d-4484-833d-db28bdc53d60 service nova] Acquired lock "refresh_cache-f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1715.619997] env[62619]: DEBUG nova.network.neutron [req-3b0c9d46-b4f7-42a9-89c3-d2fd74dab387 req-0405223b-965d-4484-833d-db28bdc53d60 service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Refreshing network info cache for port ac59f3e2-0841-445b-8907-932fdd3f4d2c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1715.823882] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778094, 'name': CreateVM_Task, 'duration_secs': 0.422994} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.824473] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1715.825675] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'boot_index': 0, 'guest_format': None, 'mount_device': '/dev/sda', 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369095', 'volume_id': 'cd357a5b-17b3-4318-9f18-e8e95fb8d9d9', 'name': 'volume-cd357a5b-17b3-4318-9f18-e8e95fb8d9d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd357a5b-17b3-4318-9f18-e8e95fb8d9d9', 'serial': 'cd357a5b-17b3-4318-9f18-e8e95fb8d9d9'}, 'attachment_id': '87661a6b-84d2-431d-97fa-c060d60c105d', 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=62619) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1715.825988] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Root volume attach. 
Driver type: vmdk {{(pid=62619) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1715.827207] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da0a015-42ca-4008-b919-704238aa1976 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.836261] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "11869077-b428-413f-9f8f-7eac08d2d9ec" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.836733] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "11869077-b428-413f-9f8f-7eac08d2d9ec" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.837263] env[62619]: INFO nova.compute.manager [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Attaching volume b0427cd3-da5b-4bb2-96ed-03dc601c6f51 to /dev/sdb [ 1715.840576] env[62619]: DEBUG nova.network.neutron [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Successfully updated port: 5911524f-a8b5-4591-a312-ea0cefac24df {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1715.843333] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0513c83-ebd6-48c4-a4a8-614a9b05743e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.860873] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9d8a50-413b-47d4-a006-a6120bf19085 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.861939] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.862812] env[62619]: DEBUG nova.compute.manager [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1715.867297] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.603s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.868848] env[62619]: INFO nova.compute.claims [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1715.880864] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-c53016ec-275a-4c79-bbf7-58669e986d7f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.891709] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Waiting for the task: (returnval){ [ 1715.891709] env[62619]: value = "task-1778096" [ 1715.891709] env[62619]: _type = "Task" [ 1715.891709] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.897448] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f9d968-2497-4d13-aade-038638dca2ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.908167] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1d4ee5-d245-499c-a90c-a328d111fa5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.911059] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778096, 'name': RelocateVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.925153] env[62619]: DEBUG nova.virt.block_device [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Updating existing volume attachment record: bdf851df-2d4d-4482-bf0d-3f69a7d66bcf {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1716.084459] env[62619]: DEBUG nova.compute.manager [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1716.101199] env[62619]: DEBUG oslo_vmware.api [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778095, 'name': ReconfigVM_Task, 'duration_secs': 0.157243} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.102151] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369104', 'volume_id': '9efe1269-d4a0-4383-8573-f7d5886afd9f', 'name': 'volume-9efe1269-d4a0-4383-8573-f7d5886afd9f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '34180944-02f7-4115-8178-64f2f2591080', 'attached_at': '', 'detached_at': '', 'volume_id': '9efe1269-d4a0-4383-8573-f7d5886afd9f', 'serial': '9efe1269-d4a0-4383-8573-f7d5886afd9f'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1716.331435] env[62619]: DEBUG nova.network.neutron [req-3b0c9d46-b4f7-42a9-89c3-d2fd74dab387 req-0405223b-965d-4484-833d-db28bdc53d60 service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Updated VIF entry in instance network info cache for port ac59f3e2-0841-445b-8907-932fdd3f4d2c. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1716.331862] env[62619]: DEBUG nova.network.neutron [req-3b0c9d46-b4f7-42a9-89c3-d2fd74dab387 req-0405223b-965d-4484-833d-db28bdc53d60 service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Updating instance_info_cache with network_info: [{"id": "ac59f3e2-0841-445b-8907-932fdd3f4d2c", "address": "fa:16:3e:11:2c:88", "network": {"id": "cc0cf77c-033d-4cc4-b7f8-b832204e998a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1661750215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4fe880c492946f397195ffc5bcd566e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac59f3e2-08", "ovs_interfaceid": "ac59f3e2-0841-445b-8907-932fdd3f4d2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.349890] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.350077] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.350230] env[62619]: DEBUG nova.network.neutron [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1716.368900] env[62619]: DEBUG nova.compute.utils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1716.371292] env[62619]: DEBUG nova.compute.manager [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1716.373307] env[62619]: DEBUG nova.network.neutron [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1716.404992] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778096, 'name': RelocateVM_Task} progress is 19%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.418327] env[62619]: DEBUG nova.policy [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25159d73422b45dbbe4bab2b2a835055', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df50ba9d97ac4c059077c87f9cfdb719', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1716.606417] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.722284] env[62619]: DEBUG nova.network.neutron [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Successfully created port: 9567749e-9f9f-4b05-a445-3099dd2cdff8 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1716.837466] env[62619]: DEBUG oslo_concurrency.lockutils [req-3b0c9d46-b4f7-42a9-89c3-d2fd74dab387 req-0405223b-965d-4484-833d-db28bdc53d60 service nova] Releasing lock "refresh_cache-f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1716.875375] env[62619]: DEBUG nova.compute.manager [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1716.884428] env[62619]: DEBUG nova.network.neutron [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1716.903738] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778096, 'name': RelocateVM_Task, 'duration_secs': 0.7863} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.904293] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Volume attach. 
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1716.904557] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369095', 'volume_id': 'cd357a5b-17b3-4318-9f18-e8e95fb8d9d9', 'name': 'volume-cd357a5b-17b3-4318-9f18-e8e95fb8d9d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd357a5b-17b3-4318-9f18-e8e95fb8d9d9', 'serial': 'cd357a5b-17b3-4318-9f18-e8e95fb8d9d9'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1716.905828] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03f9d8b-7586-4234-9faf-598be32fadf1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.936036] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17fb62a6-548e-454c-a1b3-611aefc8110e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.964634] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] volume-cd357a5b-17b3-4318-9f18-e8e95fb8d9d9/volume-cd357a5b-17b3-4318-9f18-e8e95fb8d9d9.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1716.968337] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7c6905e-f84f-4124-a48a-fd9d68a959b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.995941] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Waiting for the task: (returnval){ [ 1716.995941] env[62619]: value = "task-1778100" [ 1716.995941] env[62619]: _type = "Task" [ 1716.995941] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.009799] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778100, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.046501] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "b84dd91a-8e08-4476-9683-655357d18370" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1717.046767] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "b84dd91a-8e08-4476-9683-655357d18370" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1717.112239] env[62619]: DEBUG nova.network.neutron [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance_info_cache with network_info: [{"id": "5911524f-a8b5-4591-a312-ea0cefac24df", "address": "fa:16:3e:bd:c4:f9", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5911524f-a8", "ovs_interfaceid": "5911524f-a8b5-4591-a312-ea0cefac24df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.149268] env[62619]: DEBUG nova.objects.instance [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lazy-loading 'flavor' on Instance uuid 34180944-02f7-4115-8178-64f2f2591080 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1717.259022] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8b3f6c-ca3e-4740-9007-bb944cbf59ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.269258] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d329048e-85bd-4cd1-80cd-f745f321b7a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1717.302391] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-552680ca-069a-4e9e-b86b-28655b4cafcc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.311047] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a62f27b-ef27-4409-b040-988c8d51f19a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.325804] env[62619]: DEBUG nova.compute.provider_tree [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1717.510072] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778100, 'name': ReconfigVM_Task, 'duration_secs': 0.283023} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.510373] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Reconfigured VM instance instance-00000053 to attach disk [datastore1] volume-cd357a5b-17b3-4318-9f18-e8e95fb8d9d9/volume-cd357a5b-17b3-4318-9f18-e8e95fb8d9d9.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1717.515206] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9c37246-dde3-4499-90f2-c7e5f2641f0e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.531852] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Waiting for the task: (returnval){ [ 1717.531852] env[62619]: value = "task-1778101" [ 1717.531852] env[62619]: _type = "Task" [ 1717.531852] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.543930] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778101, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.549632] env[62619]: DEBUG nova.compute.manager [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1717.613998] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.614368] env[62619]: DEBUG nova.compute.manager [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Instance network_info: |[{"id": "5911524f-a8b5-4591-a312-ea0cefac24df", "address": "fa:16:3e:bd:c4:f9", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5911524f-a8", "ovs_interfaceid": "5911524f-a8b5-4591-a312-ea0cefac24df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1717.614844] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:c4:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '816c6e38-e200-4544-8c5b-9fc3e16c5761', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5911524f-a8b5-4591-a312-ea0cefac24df', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1717.622897] env[62619]: DEBUG oslo.service.loopingcall [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1717.622897] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1717.623080] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32407845-ff8a-45ad-b5f7-e58b06726154 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.645627] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1717.645627] env[62619]: value = "task-1778102" [ 1717.645627] env[62619]: _type = "Task" [ 1717.645627] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.651677] env[62619]: DEBUG nova.compute.manager [req-13ec3588-f9e9-4238-bf20-71b8d317a734 req-f370cb0a-daa9-49d6-9751-d851a7dacf3d service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Received event network-vif-plugged-5911524f-a8b5-4591-a312-ea0cefac24df {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1717.651677] env[62619]: DEBUG oslo_concurrency.lockutils [req-13ec3588-f9e9-4238-bf20-71b8d317a734 req-f370cb0a-daa9-49d6-9751-d851a7dacf3d service nova] Acquiring lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1717.652186] env[62619]: DEBUG oslo_concurrency.lockutils [req-13ec3588-f9e9-4238-bf20-71b8d317a734 req-f370cb0a-daa9-49d6-9751-d851a7dacf3d service nova] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1717.652186] env[62619]: DEBUG oslo_concurrency.lockutils [req-13ec3588-f9e9-4238-bf20-71b8d317a734 req-f370cb0a-daa9-49d6-9751-d851a7dacf3d service nova] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1717.652370] env[62619]: DEBUG nova.compute.manager [req-13ec3588-f9e9-4238-bf20-71b8d317a734 req-f370cb0a-daa9-49d6-9751-d851a7dacf3d service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] No waiting events found dispatching network-vif-plugged-5911524f-a8b5-4591-a312-ea0cefac24df {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1717.652421] env[62619]: WARNING nova.compute.manager [req-13ec3588-f9e9-4238-bf20-71b8d317a734 req-f370cb0a-daa9-49d6-9751-d851a7dacf3d service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Received unexpected event network-vif-plugged-5911524f-a8b5-4591-a312-ea0cefac24df for instance with vm_state building and task_state spawning. 
[ 1717.652564] env[62619]: DEBUG nova.compute.manager [req-13ec3588-f9e9-4238-bf20-71b8d317a734 req-f370cb0a-daa9-49d6-9751-d851a7dacf3d service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Received event network-changed-5911524f-a8b5-4591-a312-ea0cefac24df {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1717.652713] env[62619]: DEBUG nova.compute.manager [req-13ec3588-f9e9-4238-bf20-71b8d317a734 req-f370cb0a-daa9-49d6-9751-d851a7dacf3d service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Refreshing instance network info cache due to event network-changed-5911524f-a8b5-4591-a312-ea0cefac24df. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1717.652889] env[62619]: DEBUG oslo_concurrency.lockutils [req-13ec3588-f9e9-4238-bf20-71b8d317a734 req-f370cb0a-daa9-49d6-9751-d851a7dacf3d service nova] Acquiring lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.653030] env[62619]: DEBUG oslo_concurrency.lockutils [req-13ec3588-f9e9-4238-bf20-71b8d317a734 req-f370cb0a-daa9-49d6-9751-d851a7dacf3d service nova] Acquired lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.653186] env[62619]: DEBUG nova.network.neutron [req-13ec3588-f9e9-4238-bf20-71b8d317a734 req-f370cb0a-daa9-49d6-9751-d851a7dacf3d service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Refreshing network info cache for port 5911524f-a8b5-4591-a312-ea0cefac24df {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1717.657175] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dcc3d36b-0726-4f22-bc2d-9a21d9ae517b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "34180944-02f7-4115-8178-64f2f2591080" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.341s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1717.677323] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778102, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.846229] env[62619]: ERROR nova.scheduler.client.report [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [req-c37eddf8-e4b9-4c89-a7cf-26cdf35d159f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c37eddf8-e4b9-4c89-a7cf-26cdf35d159f"}]} [ 1717.861673] env[62619]: DEBUG nova.scheduler.client.report [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1717.876589] env[62619]: DEBUG nova.scheduler.client.report [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1717.876811] env[62619]: DEBUG nova.compute.provider_tree [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1717.884754] env[62619]: DEBUG nova.compute.manager [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1717.888241] env[62619]: DEBUG nova.scheduler.client.report [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1717.905945] env[62619]: DEBUG nova.scheduler.client.report [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1717.921209] env[62619]: DEBUG nova.virt.hardware [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1717.921452] env[62619]: DEBUG nova.virt.hardware [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1717.921607] env[62619]: DEBUG nova.virt.hardware [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1717.921786] env[62619]: DEBUG nova.virt.hardware [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1717.921929] env[62619]: DEBUG nova.virt.hardware [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1717.922088] env[62619]: DEBUG nova.virt.hardware [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1717.922294] env[62619]: DEBUG nova.virt.hardware [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1717.922447] env[62619]: DEBUG nova.virt.hardware [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1717.922607] env[62619]: DEBUG nova.virt.hardware [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1717.922764] env[62619]: DEBUG nova.virt.hardware [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1717.922973] env[62619]: DEBUG nova.virt.hardware [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1717.923865] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbae80ec-bf93-4663-a29c-6c362eb2d7c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.934303] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d5876d-ae62-439f-8cf0-fd359c51bacd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.044067] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778101, 'name': ReconfigVM_Task, 'duration_secs': 0.182638} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.044579] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369095', 'volume_id': 'cd357a5b-17b3-4318-9f18-e8e95fb8d9d9', 'name': 'volume-cd357a5b-17b3-4318-9f18-e8e95fb8d9d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd357a5b-17b3-4318-9f18-e8e95fb8d9d9', 'serial': 'cd357a5b-17b3-4318-9f18-e8e95fb8d9d9'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1718.045348] env[62619]: DEBUG oslo_concurrency.lockutils [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "34180944-02f7-4115-8178-64f2f2591080" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.045348] env[62619]: DEBUG oslo_concurrency.lockutils [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "34180944-02f7-4115-8178-64f2f2591080" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.045470] env[62619]: DEBUG oslo_concurrency.lockutils [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "34180944-02f7-4115-8178-64f2f2591080-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.045609] env[62619]: DEBUG oslo_concurrency.lockutils [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "34180944-02f7-4115-8178-64f2f2591080-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.045775] env[62619]: DEBUG oslo_concurrency.lockutils [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "34180944-02f7-4115-8178-64f2f2591080-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.047307] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3734c5ec-75ee-4dca-848b-f47e3c63cde7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.051362] env[62619]: INFO 
nova.compute.manager [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Terminating instance [ 1718.061144] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Waiting for the task: (returnval){ [ 1718.061144] env[62619]: value = "task-1778103" [ 1718.061144] env[62619]: _type = "Task" [ 1718.061144] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.074109] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778103, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.077455] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.160597] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778102, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.206373] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9c4ead-2cc7-412e-927b-9282aac4cd6f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.214175] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3d644c-a542-4433-9d82-477b5037e150 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.252682] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b5d12b-706d-423a-8588-e3903c13580a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.264174] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70ae829-c84b-4dee-99c7-acf930f72b45 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.278017] env[62619]: DEBUG nova.compute.provider_tree [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1718.298933] 
env[62619]: DEBUG nova.network.neutron [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Successfully updated port: 9567749e-9f9f-4b05-a445-3099dd2cdff8 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1718.435431] env[62619]: DEBUG nova.network.neutron [req-13ec3588-f9e9-4238-bf20-71b8d317a734 req-f370cb0a-daa9-49d6-9751-d851a7dacf3d service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updated VIF entry in instance network info cache for port 5911524f-a8b5-4591-a312-ea0cefac24df. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1718.435594] env[62619]: DEBUG nova.network.neutron [req-13ec3588-f9e9-4238-bf20-71b8d317a734 req-f370cb0a-daa9-49d6-9751-d851a7dacf3d service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance_info_cache with network_info: [{"id": "5911524f-a8b5-4591-a312-ea0cefac24df", "address": "fa:16:3e:bd:c4:f9", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5911524f-a8", "ovs_interfaceid": "5911524f-a8b5-4591-a312-ea0cefac24df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1718.557391] env[62619]: DEBUG nova.compute.manager [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1718.557634] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1718.557918] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9bc02675-f3c2-4b2a-ae12-282e3d553ab4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.569693] env[62619]: DEBUG oslo_vmware.api [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1718.569693] env[62619]: value = "task-1778105" [ 1718.569693] env[62619]: _type = "Task" [ 1718.569693] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.577589] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778103, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.583056] env[62619]: DEBUG oslo_vmware.api [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778105, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.656171] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778102, 'name': CreateVM_Task, 'duration_secs': 0.537242} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.656387] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1718.657095] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.657274] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.657708] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1718.657968] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c08b8229-3d24-4ed7-a335-ab0060facc19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.673391] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1718.673391] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52254309-36ce-f007-7304-23af2e27e21a" [ 1718.673391] env[62619]: _type = "Task" [ 1718.673391] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.682311] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52254309-36ce-f007-7304-23af2e27e21a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.801316] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "refresh_cache-7cb51b51-514d-4223-a82a-5cdbdab9482a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.801452] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "refresh_cache-7cb51b51-514d-4223-a82a-5cdbdab9482a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.801541] env[62619]: DEBUG nova.network.neutron [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1718.812988] env[62619]: DEBUG nova.scheduler.client.report [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 123 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1718.813347] env[62619]: DEBUG nova.compute.provider_tree [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 123 to 124 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1718.813641] env[62619]: DEBUG nova.compute.provider_tree [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1718.938566] env[62619]: DEBUG oslo_concurrency.lockutils [req-13ec3588-f9e9-4238-bf20-71b8d317a734 req-f370cb0a-daa9-49d6-9751-d851a7dacf3d service nova] Releasing lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.073735] env[62619]: DEBUG oslo_vmware.api [None 
req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778103, 'name': Rename_Task, 'duration_secs': 0.737829} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.078099] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1719.078099] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a018db16-3302-4b8a-9feb-1c712ccb6fc4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.083577] env[62619]: DEBUG oslo_vmware.api [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778105, 'name': PowerOffVM_Task, 'duration_secs': 0.220775} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.084651] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1719.084850] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Volume detach. Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1719.085048] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369104', 'volume_id': '9efe1269-d4a0-4383-8573-f7d5886afd9f', 'name': 'volume-9efe1269-d4a0-4383-8573-f7d5886afd9f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '34180944-02f7-4115-8178-64f2f2591080', 'attached_at': '', 'detached_at': '', 'volume_id': '9efe1269-d4a0-4383-8573-f7d5886afd9f', 'serial': '9efe1269-d4a0-4383-8573-f7d5886afd9f'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1719.085353] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Waiting for the task: (returnval){ [ 1719.085353] env[62619]: value = "task-1778106" [ 1719.085353] env[62619]: _type = "Task" [ 1719.085353] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.086022] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc5493e-b2ac-4ce6-96af-a08d70eed679 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.095974] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778106, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.112469] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789d4214-f41a-484f-b8f8-13edaf4b24f2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.119773] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0786cc95-cc00-45ce-8dec-e35b198ac225 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.142740] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b56109d-8de0-46ba-b581-48daa25f7488 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.160718] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] The volume has not been displaced from its original location: [datastore1] volume-9efe1269-d4a0-4383-8573-f7d5886afd9f/volume-9efe1269-d4a0-4383-8573-f7d5886afd9f.vmdk. No consolidation needed. {{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1719.166089] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Reconfiguring VM instance instance-00000051 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1719.166440] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e62a23a-cde7-4a90-96a8-f10da7eb728d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.189741] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52254309-36ce-f007-7304-23af2e27e21a, 'name': SearchDatastore_Task, 'duration_secs': 0.010634} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.191117] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.191352] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1719.191580] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.191721] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.191892] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1719.192224] env[62619]: DEBUG oslo_vmware.api [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1719.192224] env[62619]: value = "task-1778107" [ 1719.192224] env[62619]: _type = "Task" [ 1719.192224] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.192409] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0962ae22-a5bf-4cfd-9b51-63e14694ea85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.203348] env[62619]: DEBUG oslo_vmware.api [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778107, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.206181] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1719.206416] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1719.207168] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3250d69-5b50-47f9-ab23-e838d95e543f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.212687] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1719.212687] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5227711c-6ef8-c357-d948-fb610bf42a10" [ 1719.212687] env[62619]: _type = "Task" [ 1719.212687] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.221128] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5227711c-6ef8-c357-d948-fb610bf42a10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.320811] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.453s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.321458] env[62619]: DEBUG nova.compute.manager [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1719.324193] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.206s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.325638] env[62619]: INFO nova.compute.claims [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1719.395272] env[62619]: DEBUG nova.network.neutron [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1719.418614] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1719.418849] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1719.598796] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778106, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.639914] env[62619]: DEBUG nova.network.neutron [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating instance_info_cache with network_info: [{"id": "9567749e-9f9f-4b05-a445-3099dd2cdff8", "address": "fa:16:3e:7e:df:71", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9567749e-9f", "ovs_interfaceid": "9567749e-9f9f-4b05-a445-3099dd2cdff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1719.706953] env[62619]: DEBUG oslo_vmware.api [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778107, 'name': ReconfigVM_Task, 'duration_secs': 0.253763} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.707306] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Reconfigured VM instance instance-00000051 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1719.712477] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc26c041-8ecb-4b61-9e1f-0b1103d11029 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.733249] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5227711c-6ef8-c357-d948-fb610bf42a10, 'name': SearchDatastore_Task, 'duration_secs': 0.018109} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.735175] env[62619]: DEBUG oslo_vmware.api [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1719.735175] env[62619]: value = "task-1778108" [ 1719.735175] env[62619]: _type = "Task" [ 1719.735175] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.735382] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87dc2fca-7fb8-40d8-bfad-af9f39f8ffb7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.748634] env[62619]: DEBUG oslo_vmware.api [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778108, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.750577] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1719.750577] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5212a95a-996d-3abb-faed-bc567322ce23" [ 1719.750577] env[62619]: _type = "Task" [ 1719.750577] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.758079] env[62619]: DEBUG nova.compute.manager [req-1cbfbbc0-a0c4-4474-afa0-92ef3ec58f3c req-b69453a1-98d9-44a5-a2f5-cfb097ed03a4 service nova] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Received event network-vif-plugged-9567749e-9f9f-4b05-a445-3099dd2cdff8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1719.758079] env[62619]: DEBUG oslo_concurrency.lockutils [req-1cbfbbc0-a0c4-4474-afa0-92ef3ec58f3c req-b69453a1-98d9-44a5-a2f5-cfb097ed03a4 service nova] Acquiring lock "7cb51b51-514d-4223-a82a-5cdbdab9482a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.758079] env[62619]: DEBUG oslo_concurrency.lockutils [req-1cbfbbc0-a0c4-4474-afa0-92ef3ec58f3c req-b69453a1-98d9-44a5-a2f5-cfb097ed03a4 service nova] Lock "7cb51b51-514d-4223-a82a-5cdbdab9482a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.758079] env[62619]: DEBUG oslo_concurrency.lockutils [req-1cbfbbc0-a0c4-4474-afa0-92ef3ec58f3c req-b69453a1-98d9-44a5-a2f5-cfb097ed03a4 service nova] Lock "7cb51b51-514d-4223-a82a-5cdbdab9482a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.758079] env[62619]: DEBUG nova.compute.manager [req-1cbfbbc0-a0c4-4474-afa0-92ef3ec58f3c req-b69453a1-98d9-44a5-a2f5-cfb097ed03a4 service nova] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] No waiting events found dispatching 
network-vif-plugged-9567749e-9f9f-4b05-a445-3099dd2cdff8 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1719.758079] env[62619]: WARNING nova.compute.manager [req-1cbfbbc0-a0c4-4474-afa0-92ef3ec58f3c req-b69453a1-98d9-44a5-a2f5-cfb097ed03a4 service nova] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Received unexpected event network-vif-plugged-9567749e-9f9f-4b05-a445-3099dd2cdff8 for instance with vm_state building and task_state spawning. [ 1719.758440] env[62619]: DEBUG nova.compute.manager [req-1cbfbbc0-a0c4-4474-afa0-92ef3ec58f3c req-b69453a1-98d9-44a5-a2f5-cfb097ed03a4 service nova] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Received event network-changed-9567749e-9f9f-4b05-a445-3099dd2cdff8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1719.758568] env[62619]: DEBUG nova.compute.manager [req-1cbfbbc0-a0c4-4474-afa0-92ef3ec58f3c req-b69453a1-98d9-44a5-a2f5-cfb097ed03a4 service nova] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Refreshing instance network info cache due to event network-changed-9567749e-9f9f-4b05-a445-3099dd2cdff8. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1719.758838] env[62619]: DEBUG oslo_concurrency.lockutils [req-1cbfbbc0-a0c4-4474-afa0-92ef3ec58f3c req-b69453a1-98d9-44a5-a2f5-cfb097ed03a4 service nova] Acquiring lock "refresh_cache-7cb51b51-514d-4223-a82a-5cdbdab9482a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.767935] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5212a95a-996d-3abb-faed-bc567322ce23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.829667] env[62619]: DEBUG nova.compute.utils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1719.832822] env[62619]: DEBUG nova.compute.manager [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1719.833425] env[62619]: DEBUG nova.network.neutron [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1719.922726] env[62619]: DEBUG nova.policy [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e9094d6b3854c1184307d9bc35a966e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e11e1bca0c747fd8b4a0ca3e220ba4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1719.931079] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1719.931079] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1720.100021] env[62619]: DEBUG oslo_vmware.api [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778106, 'name': PowerOnVM_Task, 'duration_secs': 0.721285} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.100614] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1720.100614] env[62619]: INFO nova.compute.manager [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Took 5.32 seconds to spawn the instance on the hypervisor. 
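The PowerOnVM_Task sequence above (Invoking VirtualMachine.PowerOnVM_Task, then Task ... progress is 0% / 88%, then completed successfully) is oslo.vmware's usual invoke-then-poll pattern: the driver submits the task once and wait_for_task re-reads the task state at the configured task_poll_interval until vCenter reports success or failure. A minimal sketch of that pattern outside of Nova, with placeholder endpoint, credentials and VM moref (none of these values are taken from this log):

    from oslo_vmware import api
    from oslo_vmware import vim_util

    # Placeholder connection details; in Nova these come from the [vmware]
    # section of nova.conf.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   10,    # api_retry_count
                                   0.5)   # task_poll_interval, seconds

    # Build a managed object reference for an existing VM (the id is a
    # placeholder; Nova resolves and caches morefs itself).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Submit the power-on task and block until it finishes; each poll of the
    # task's state is what surfaces as a "progress is N%" line in the log.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task_ref)
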
[ 1720.100839] env[62619]: DEBUG nova.compute.manager [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1720.101509] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1387d2b2-1988-413e-9314-37e2838a6ac6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.142587] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "refresh_cache-7cb51b51-514d-4223-a82a-5cdbdab9482a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.142948] env[62619]: DEBUG nova.compute.manager [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Instance network_info: |[{"id": "9567749e-9f9f-4b05-a445-3099dd2cdff8", "address": "fa:16:3e:7e:df:71", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9567749e-9f", "ovs_interfaceid": "9567749e-9f9f-4b05-a445-3099dd2cdff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1720.143272] env[62619]: DEBUG oslo_concurrency.lockutils [req-1cbfbbc0-a0c4-4474-afa0-92ef3ec58f3c req-b69453a1-98d9-44a5-a2f5-cfb097ed03a4 service nova] Acquired lock "refresh_cache-7cb51b51-514d-4223-a82a-5cdbdab9482a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1720.143446] env[62619]: DEBUG nova.network.neutron [req-1cbfbbc0-a0c4-4474-afa0-92ef3ec58f3c req-b69453a1-98d9-44a5-a2f5-cfb097ed03a4 service nova] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Refreshing network info cache for port 9567749e-9f9f-4b05-a445-3099dd2cdff8 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1720.144801] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:7e:df:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9567749e-9f9f-4b05-a445-3099dd2cdff8', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1720.152577] env[62619]: DEBUG oslo.service.loopingcall [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1720.155903] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1720.156754] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53b719c3-45a4-4e1c-8035-d36b8ffd1eaa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.179967] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1720.179967] env[62619]: value = "task-1778109" [ 1720.179967] env[62619]: _type = "Task" [ 1720.179967] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.189217] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778109, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.249553] env[62619]: DEBUG oslo_vmware.api [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778108, 'name': ReconfigVM_Task, 'duration_secs': 0.321121} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.250367] env[62619]: DEBUG nova.network.neutron [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Successfully created port: d680f022-949d-41b7-b22b-7900ed9b02f9 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1720.252176] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369104', 'volume_id': '9efe1269-d4a0-4383-8573-f7d5886afd9f', 'name': 'volume-9efe1269-d4a0-4383-8573-f7d5886afd9f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '34180944-02f7-4115-8178-64f2f2591080', 'attached_at': '', 'detached_at': '', 'volume_id': '9efe1269-d4a0-4383-8573-f7d5886afd9f', 'serial': '9efe1269-d4a0-4383-8573-f7d5886afd9f'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1720.252517] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1720.253439] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8108b73-afce-4650-bba0-042ec151aaf8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.268111] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5212a95a-996d-3abb-faed-bc567322ce23, 'name': SearchDatastore_Task, 'duration_secs': 0.024792} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.270156] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.270428] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 917960ca-3870-4e4e-aafe-3c6d77cf7c51/917960ca-3870-4e4e-aafe-3c6d77cf7c51.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1720.270719] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1720.270928] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fd356cf4-e3aa-408a-a799-70da60f8359f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.273251] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4eca35cf-fffc-47d8-85c4-a1e3db7628c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.281334] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1720.281334] env[62619]: value = "task-1778110" [ 1720.281334] env[62619]: _type = "Task" [ 1720.281334] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.293741] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778110, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.336092] env[62619]: DEBUG nova.compute.manager [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1720.386603] env[62619]: DEBUG nova.network.neutron [req-1cbfbbc0-a0c4-4474-afa0-92ef3ec58f3c req-b69453a1-98d9-44a5-a2f5-cfb097ed03a4 service nova] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updated VIF entry in instance network info cache for port 9567749e-9f9f-4b05-a445-3099dd2cdff8. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1720.387097] env[62619]: DEBUG nova.network.neutron [req-1cbfbbc0-a0c4-4474-afa0-92ef3ec58f3c req-b69453a1-98d9-44a5-a2f5-cfb097ed03a4 service nova] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating instance_info_cache with network_info: [{"id": "9567749e-9f9f-4b05-a445-3099dd2cdff8", "address": "fa:16:3e:7e:df:71", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9567749e-9f", "ovs_interfaceid": "9567749e-9f9f-4b05-a445-3099dd2cdff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.485880] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Volume attach. 
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1720.486155] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369108', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'name': 'volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '11869077-b428-413f-9f8f-7eac08d2d9ec', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'serial': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1720.487436] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14672fab-f96a-4cf2-8e9c-ff076cbd319a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.508764] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd81b2be-e53e-4f38-a16c-3bbeace16f42 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.539454] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51/volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1720.542659] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe86a62b-c517-4743-8055-abff24db5a6c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.563248] env[62619]: DEBUG oslo_vmware.api [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1720.563248] env[62619]: value = "task-1778112" [ 1720.563248] env[62619]: _type = "Task" [ 1720.563248] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.574149] env[62619]: DEBUG oslo_vmware.api [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778112, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.623384] env[62619]: INFO nova.compute.manager [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Took 18.90 seconds to build instance. 
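The lock traffic surrounding these entries (the "compute_resources" lock, the per-instance "-events" lock, and the "[datastore1] devstack-image-cache_base/..." paths) all comes from oslo.concurrency's lockutils; judging by the lockutils.py line numbers in the traces, the "Acquiring lock ... / Releasing lock ..." lines come from the lock() context manager and the "acquired by ... waited Ns / released by ... held Ns" lines from the synchronized() decorator's wrapper. A minimal sketch of both forms, with an illustrative lock name rather than the exact strings Nova uses:

    from oslo_concurrency import lockutils

    # Context-manager form: serialize work on one cached image so only a
    # single request copies the base VMDK into the cache at a time.
    CACHE_LOCK = '[datastore1] devstack-image-cache_base/<image-id>'

    def ensure_base_image_cached():
        with lockutils.lock(CACHE_LOCK):
            # fetch or copy the base image exactly once while holding the lock
            ...

    # Decorator form: the whole function body runs under the named lock, and
    # the wait/held timings are logged by the decorator's wrapper.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        ...
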
[ 1720.691625] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1720.691625] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1720.695404] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleting the datastore file [datastore1] 34180944-02f7-4115-8178-64f2f2591080 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1720.695404] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88090111-10f6-415f-a2c2-6619097bb3e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.699082] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778109, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.707327] env[62619]: DEBUG oslo_vmware.api [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1720.707327] env[62619]: value = "task-1778113" [ 1720.707327] env[62619]: _type = "Task" [ 1720.707327] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.720216] env[62619]: DEBUG oslo_vmware.api [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778113, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.738857] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f64f26-1d05-4bd9-908a-edce49a65454 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.749221] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57aa0bbe-94e6-45db-90d8-bc1982edfbdd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.785070] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2861cc-cf04-49b1-9ef7-fb28b5c4b1bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.802853] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f53d85-f668-4d6b-9e8d-eca1380a36ed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.807308] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778110, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48344} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.807604] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 917960ca-3870-4e4e-aafe-3c6d77cf7c51/917960ca-3870-4e4e-aafe-3c6d77cf7c51.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1720.811076] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1720.811076] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e8b1c52-1c43-4f74-a8c7-7e1a626b97bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.821101] env[62619]: DEBUG nova.compute.provider_tree [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1720.829148] 
env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1720.829148] env[62619]: value = "task-1778114" [ 1720.829148] env[62619]: _type = "Task" [ 1720.829148] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.840140] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778114, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.889620] env[62619]: DEBUG oslo_concurrency.lockutils [req-1cbfbbc0-a0c4-4474-afa0-92ef3ec58f3c req-b69453a1-98d9-44a5-a2f5-cfb097ed03a4 service nova] Releasing lock "refresh_cache-7cb51b51-514d-4223-a82a-5cdbdab9482a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1721.073316] env[62619]: DEBUG oslo_vmware.api [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778112, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.125656] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c49608e-694d-44a6-88dd-28c81bc6d69f tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Lock "f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.409s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.192630] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778109, 'name': CreateVM_Task, 'duration_secs': 0.762893} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.192816] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1721.193520] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.193689] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.194028] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1721.194299] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-307d2619-54a1-4e56-97f5-4d52b9385068 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.200123] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1721.200123] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5280dbea-7634-664c-5421-d5fba74a0699" [ 1721.200123] env[62619]: _type = "Task" [ 1721.200123] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.209323] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5280dbea-7634-664c-5421-d5fba74a0699, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.217906] env[62619]: DEBUG oslo_vmware.api [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778113, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177498} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.218182] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1721.218367] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1721.218566] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1721.218741] env[62619]: INFO nova.compute.manager [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Took 2.66 seconds to destroy the instance on the hypervisor. [ 1721.218972] env[62619]: DEBUG oslo.service.loopingcall [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1721.219196] env[62619]: DEBUG nova.compute.manager [-] [instance: 34180944-02f7-4115-8178-64f2f2591080] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1721.219324] env[62619]: DEBUG nova.network.neutron [-] [instance: 34180944-02f7-4115-8178-64f2f2591080] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1721.340761] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778114, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.13131} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.341452] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1721.342406] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a91e90-b734-4662-b7a8-5fef05c833b3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.357266] env[62619]: DEBUG nova.compute.manager [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1721.367512] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 917960ca-3870-4e4e-aafe-3c6d77cf7c51/917960ca-3870-4e4e-aafe-3c6d77cf7c51.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1721.368550] env[62619]: DEBUG nova.scheduler.client.report [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 124 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1721.368785] env[62619]: DEBUG nova.compute.provider_tree [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 124 to 125 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1721.368971] env[62619]: DEBUG nova.compute.provider_tree [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1721.376812] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d3d38d6-7259-427c-8279-d0cd4ffe1ebd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.403666] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1721.403666] env[62619]: value = "task-1778115" [ 1721.403666] env[62619]: _type = "Task" [ 1721.403666] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.409666] env[62619]: DEBUG nova.virt.hardware [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1721.409905] env[62619]: DEBUG nova.virt.hardware [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1721.410106] env[62619]: DEBUG nova.virt.hardware [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1721.410334] env[62619]: DEBUG nova.virt.hardware [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1721.410488] env[62619]: DEBUG nova.virt.hardware [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1721.410635] env[62619]: DEBUG nova.virt.hardware [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1721.410844] env[62619]: DEBUG nova.virt.hardware [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 
tempest-ServersTestJSON-68127786-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1721.411010] env[62619]: DEBUG nova.virt.hardware [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1721.411188] env[62619]: DEBUG nova.virt.hardware [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1721.411348] env[62619]: DEBUG nova.virt.hardware [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1721.411518] env[62619]: DEBUG nova.virt.hardware [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1721.412596] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2f534b-4f75-4281-8677-511fba885c33 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.424009] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9e224a-ffe4-4473-bff5-f059a8f4a626 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.428752] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778115, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.574724] env[62619]: DEBUG oslo_vmware.api [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778112, 'name': ReconfigVM_Task, 'duration_secs': 0.726717} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.575016] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Reconfigured VM instance instance-0000004b to attach disk [datastore1] volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51/volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1721.581159] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe6ffcd7-14ba-4ee8-a119-816577e1a436 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.601800] env[62619]: DEBUG oslo_vmware.api [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1721.601800] env[62619]: value = "task-1778116" [ 1721.601800] env[62619]: _type = "Task" [ 1721.601800] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.611452] env[62619]: DEBUG oslo_vmware.api [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778116, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.713032] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5280dbea-7634-664c-5421-d5fba74a0699, 'name': SearchDatastore_Task, 'duration_secs': 0.010879} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.713563] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1721.713829] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1721.714955] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.714955] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.715168] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1721.715603] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14dc07eb-4c4f-4cc9-b189-7d593c1dc49f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.728096] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1721.728407] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1721.729170] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc0e0a14-7950-4173-8486-50b971bdc31c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.738579] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1721.738579] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52938514-f444-dd5f-d849-ff1d8dda318b" [ 1721.738579] env[62619]: _type = "Task" [ 1721.738579] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.753193] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52938514-f444-dd5f-d849-ff1d8dda318b, 'name': SearchDatastore_Task, 'duration_secs': 0.010771} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.753193] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-553e5a5e-86ba-4045-9b1c-85bd87260e41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.761511] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1721.761511] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526a98ba-9c0a-375b-f450-b6e56ec1b266" [ 1721.761511] env[62619]: _type = "Task" [ 1721.761511] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.768734] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526a98ba-9c0a-375b-f450-b6e56ec1b266, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.782545] env[62619]: DEBUG nova.compute.manager [req-6f3c1d8e-ccf6-44cf-a33b-3374e061d46d req-9ccce0d0-02e3-46bf-a50c-2f2f8b47e91b service nova] [instance: a3101076-36d6-409a-8072-638107e63073] Received event network-vif-plugged-d680f022-949d-41b7-b22b-7900ed9b02f9 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1721.782616] env[62619]: DEBUG oslo_concurrency.lockutils [req-6f3c1d8e-ccf6-44cf-a33b-3374e061d46d req-9ccce0d0-02e3-46bf-a50c-2f2f8b47e91b service nova] Acquiring lock "a3101076-36d6-409a-8072-638107e63073-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.782811] env[62619]: DEBUG oslo_concurrency.lockutils [req-6f3c1d8e-ccf6-44cf-a33b-3374e061d46d req-9ccce0d0-02e3-46bf-a50c-2f2f8b47e91b service nova] Lock "a3101076-36d6-409a-8072-638107e63073-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.782978] env[62619]: DEBUG oslo_concurrency.lockutils [req-6f3c1d8e-ccf6-44cf-a33b-3374e061d46d req-9ccce0d0-02e3-46bf-a50c-2f2f8b47e91b service nova] Lock "a3101076-36d6-409a-8072-638107e63073-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.783155] env[62619]: DEBUG nova.compute.manager [req-6f3c1d8e-ccf6-44cf-a33b-3374e061d46d req-9ccce0d0-02e3-46bf-a50c-2f2f8b47e91b service nova] [instance: a3101076-36d6-409a-8072-638107e63073] No waiting events found dispatching network-vif-plugged-d680f022-949d-41b7-b22b-7900ed9b02f9 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1721.783319] env[62619]: WARNING nova.compute.manager [req-6f3c1d8e-ccf6-44cf-a33b-3374e061d46d req-9ccce0d0-02e3-46bf-a50c-2f2f8b47e91b service nova] [instance: a3101076-36d6-409a-8072-638107e63073] Received unexpected event network-vif-plugged-d680f022-949d-41b7-b22b-7900ed9b02f9 for instance with vm_state building and task_state spawning. [ 1721.879190] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.553s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.879190] env[62619]: DEBUG nova.compute.manager [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1721.883158] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.263s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.883692] env[62619]: INFO nova.compute.claims [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1721.898445] env[62619]: DEBUG nova.compute.manager [req-3e55c303-41ac-4fb4-9ff9-f721ab819f07 req-733fc8e2-1fdf-4884-b012-19e8e98af3c1 service nova] [instance: 34180944-02f7-4115-8178-64f2f2591080] Received event network-vif-deleted-ef12af5c-f7e0-44d8-9222-fa7401efd43d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1721.898445] env[62619]: INFO nova.compute.manager [req-3e55c303-41ac-4fb4-9ff9-f721ab819f07 req-733fc8e2-1fdf-4884-b012-19e8e98af3c1 service nova] [instance: 34180944-02f7-4115-8178-64f2f2591080] Neutron deleted interface ef12af5c-f7e0-44d8-9222-fa7401efd43d; detaching it from the instance and deleting it from the info cache [ 1721.898445] env[62619]: DEBUG nova.network.neutron [req-3e55c303-41ac-4fb4-9ff9-f721ab819f07 req-733fc8e2-1fdf-4884-b012-19e8e98af3c1 service nova] [instance: 34180944-02f7-4115-8178-64f2f2591080] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1721.911949] env[62619]: DEBUG nova.network.neutron [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Successfully updated port: d680f022-949d-41b7-b22b-7900ed9b02f9 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1721.922126] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778115, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.113893] env[62619]: DEBUG oslo_vmware.api [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778116, 'name': ReconfigVM_Task, 'duration_secs': 0.271057} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.114209] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369108', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'name': 'volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '11869077-b428-413f-9f8f-7eac08d2d9ec', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'serial': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1722.271732] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526a98ba-9c0a-375b-f450-b6e56ec1b266, 'name': SearchDatastore_Task, 'duration_secs': 0.010251} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.272103] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.272293] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 7cb51b51-514d-4223-a82a-5cdbdab9482a/7cb51b51-514d-4223-a82a-5cdbdab9482a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1722.272625] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b8cf22b-897e-49ce-918d-c0d2a45f4fde {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.282171] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1722.282171] env[62619]: value = "task-1778117" [ 1722.282171] env[62619]: _type = "Task" [ 1722.282171] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.291160] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778117, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.365782] env[62619]: DEBUG nova.network.neutron [-] [instance: 34180944-02f7-4115-8178-64f2f2591080] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.383070] env[62619]: DEBUG nova.compute.utils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1722.384663] env[62619]: DEBUG nova.compute.manager [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1722.384875] env[62619]: DEBUG nova.network.neutron [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1722.399437] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8bdc5d9b-1e97-4a11-85b2-8e0e1027a489 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.412706] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecea2c5-637f-4e88-b2ba-26157d996df0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.427706] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "refresh_cache-a3101076-36d6-409a-8072-638107e63073" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1722.427706] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "refresh_cache-a3101076-36d6-409a-8072-638107e63073" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1722.427996] env[62619]: DEBUG nova.network.neutron [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1722.429693] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778115, 'name': ReconfigVM_Task, 'duration_secs': 0.7319} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.430660] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 917960ca-3870-4e4e-aafe-3c6d77cf7c51/917960ca-3870-4e4e-aafe-3c6d77cf7c51.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1722.431357] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5cb368a7-ebe6-4de9-8b6d-0fe2403d537d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.434986] env[62619]: DEBUG nova.policy [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1cbe4a8a89e44105969767acfcf8764d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bb5d393c514d41f78fd4ea45d2f888a8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1722.439924] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1722.439924] env[62619]: value = "task-1778118" [ 1722.439924] env[62619]: _type = "Task" [ 1722.439924] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.453352] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778118, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.471903] env[62619]: DEBUG nova.compute.manager [req-3e55c303-41ac-4fb4-9ff9-f721ab819f07 req-733fc8e2-1fdf-4884-b012-19e8e98af3c1 service nova] [instance: 34180944-02f7-4115-8178-64f2f2591080] Detach interface failed, port_id=ef12af5c-f7e0-44d8-9222-fa7401efd43d, reason: Instance 34180944-02f7-4115-8178-64f2f2591080 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1722.771303] env[62619]: DEBUG nova.network.neutron [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Successfully created port: 9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1722.792949] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778117, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450296} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.794131] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 7cb51b51-514d-4223-a82a-5cdbdab9482a/7cb51b51-514d-4223-a82a-5cdbdab9482a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1722.794131] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1722.794131] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8bbcb175-9d33-46d9-9984-945b120270c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.801659] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1722.801659] env[62619]: value = "task-1778119" [ 1722.801659] env[62619]: _type = "Task" [ 1722.801659] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.810570] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778119, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.869073] env[62619]: INFO nova.compute.manager [-] [instance: 34180944-02f7-4115-8178-64f2f2591080] Took 1.65 seconds to deallocate network for instance. [ 1722.889220] env[62619]: DEBUG nova.compute.manager [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1722.956391] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778118, 'name': Rename_Task, 'duration_secs': 0.287389} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.960151] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1722.960151] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07e60920-ca45-41c5-b0f8-5ef0bb717a3a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.967466] env[62619]: DEBUG nova.network.neutron [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1722.971854] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1722.971854] env[62619]: value = "task-1778120" [ 1722.971854] env[62619]: _type = "Task" [ 1722.971854] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.984306] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778120, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.145571] env[62619]: DEBUG nova.network.neutron [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Updating instance_info_cache with network_info: [{"id": "d680f022-949d-41b7-b22b-7900ed9b02f9", "address": "fa:16:3e:51:7a:fc", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd680f022-94", "ovs_interfaceid": "d680f022-949d-41b7-b22b-7900ed9b02f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.156496] env[62619]: DEBUG nova.objects.instance [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lazy-loading 'flavor' on Instance uuid 11869077-b428-413f-9f8f-7eac08d2d9ec {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1723.287546] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd48054-9257-4ded-90ee-78b77c7434d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.296118] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7501a7da-9be0-4488-9885-6229976f3d5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.330940] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52aeb657-1d48-4df3-b7a2-d6af59ed589d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.336507] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778119, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071237} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.337130] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1723.337867] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afdecc8-3941-4392-8f84-8b556d1ec92f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.343583] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb02a29-8066-40f7-b879-d078bf75fe18 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.366241] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 7cb51b51-514d-4223-a82a-5cdbdab9482a/7cb51b51-514d-4223-a82a-5cdbdab9482a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1723.366994] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0265ea1b-8ff6-4117-87a7-389fc4b0c6c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.390188] env[62619]: DEBUG nova.compute.provider_tree [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1723.401426] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1723.401426] env[62619]: value = "task-1778121" [ 1723.401426] env[62619]: _type = "Task" [ 1723.401426] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.413172] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778121, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.437176] env[62619]: INFO nova.compute.manager [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 34180944-02f7-4115-8178-64f2f2591080] Took 0.57 seconds to detach 1 volumes for instance. 
[ 1723.482453] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778120, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.648137] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "refresh_cache-a3101076-36d6-409a-8072-638107e63073" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.648519] env[62619]: DEBUG nova.compute.manager [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Instance network_info: |[{"id": "d680f022-949d-41b7-b22b-7900ed9b02f9", "address": "fa:16:3e:51:7a:fc", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd680f022-94", "ovs_interfaceid": "d680f022-949d-41b7-b22b-7900ed9b02f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1723.648968] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:7a:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '950a2f67-7668-4376-9d48-b38dca033c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd680f022-949d-41b7-b22b-7900ed9b02f9', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1723.656865] env[62619]: DEBUG oslo.service.loopingcall [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1723.657121] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3101076-36d6-409a-8072-638107e63073] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1723.657319] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86c759e7-f6cb-4b03-88e6-75fc25f2dd8f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.674433] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1c2182c2-2c85-4aee-a34f-6cf02e959812 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "11869077-b428-413f-9f8f-7eac08d2d9ec" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.838s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.679838] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1723.679838] env[62619]: value = "task-1778122" [ 1723.679838] env[62619]: _type = "Task" [ 1723.679838] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.688271] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778122, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.872473] env[62619]: DEBUG nova.compute.manager [req-58e6b9d4-a976-4bf1-8561-9e1df7a73e7c req-da036f2a-d252-4b80-99ee-6da96f3d587d service nova] [instance: a3101076-36d6-409a-8072-638107e63073] Received event network-changed-d680f022-949d-41b7-b22b-7900ed9b02f9 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1723.872724] env[62619]: DEBUG nova.compute.manager [req-58e6b9d4-a976-4bf1-8561-9e1df7a73e7c req-da036f2a-d252-4b80-99ee-6da96f3d587d service nova] [instance: a3101076-36d6-409a-8072-638107e63073] Refreshing instance network info cache due to event network-changed-d680f022-949d-41b7-b22b-7900ed9b02f9. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1723.872984] env[62619]: DEBUG oslo_concurrency.lockutils [req-58e6b9d4-a976-4bf1-8561-9e1df7a73e7c req-da036f2a-d252-4b80-99ee-6da96f3d587d service nova] Acquiring lock "refresh_cache-a3101076-36d6-409a-8072-638107e63073" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.873180] env[62619]: DEBUG oslo_concurrency.lockutils [req-58e6b9d4-a976-4bf1-8561-9e1df7a73e7c req-da036f2a-d252-4b80-99ee-6da96f3d587d service nova] Acquired lock "refresh_cache-a3101076-36d6-409a-8072-638107e63073" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.873408] env[62619]: DEBUG nova.network.neutron [req-58e6b9d4-a976-4bf1-8561-9e1df7a73e7c req-da036f2a-d252-4b80-99ee-6da96f3d587d service nova] [instance: a3101076-36d6-409a-8072-638107e63073] Refreshing network info cache for port d680f022-949d-41b7-b22b-7900ed9b02f9 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1723.893752] env[62619]: DEBUG nova.scheduler.client.report [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1723.904353] env[62619]: DEBUG nova.compute.manager [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1723.918640] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778121, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.936126] env[62619]: DEBUG nova.virt.hardware [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1723.936126] env[62619]: DEBUG nova.virt.hardware [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1723.936126] env[62619]: DEBUG nova.virt.hardware [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1723.936309] env[62619]: DEBUG nova.virt.hardware [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1723.936463] env[62619]: DEBUG nova.virt.hardware [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1723.936611] env[62619]: DEBUG nova.virt.hardware [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1723.936815] env[62619]: DEBUG nova.virt.hardware [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1723.936970] env[62619]: DEBUG nova.virt.hardware [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1723.937190] env[62619]: DEBUG nova.virt.hardware [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1723.937361] env[62619]: DEBUG nova.virt.hardware [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1723.937552] env[62619]: DEBUG nova.virt.hardware [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1723.939415] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e923b4-5fb6-4497-a921-35c4fff734d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.944448] env[62619]: DEBUG oslo_concurrency.lockutils [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.945609] env[62619]: DEBUG nova.compute.manager [req-c32d1122-5b67-4eed-93fc-48c17771063a req-1545d661-0af9-4b20-a8c0-d176e4294b67 service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Received event network-changed-ac59f3e2-0841-445b-8907-932fdd3f4d2c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1723.945790] env[62619]: DEBUG nova.compute.manager [req-c32d1122-5b67-4eed-93fc-48c17771063a req-1545d661-0af9-4b20-a8c0-d176e4294b67 service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Refreshing instance network info cache due to event network-changed-ac59f3e2-0841-445b-8907-932fdd3f4d2c. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1723.946016] env[62619]: DEBUG oslo_concurrency.lockutils [req-c32d1122-5b67-4eed-93fc-48c17771063a req-1545d661-0af9-4b20-a8c0-d176e4294b67 service nova] Acquiring lock "refresh_cache-f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.946213] env[62619]: DEBUG oslo_concurrency.lockutils [req-c32d1122-5b67-4eed-93fc-48c17771063a req-1545d661-0af9-4b20-a8c0-d176e4294b67 service nova] Acquired lock "refresh_cache-f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.946391] env[62619]: DEBUG nova.network.neutron [req-c32d1122-5b67-4eed-93fc-48c17771063a req-1545d661-0af9-4b20-a8c0-d176e4294b67 service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Refreshing network info cache for port ac59f3e2-0841-445b-8907-932fdd3f4d2c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1723.956432] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e576028-83d5-4dfe-b629-6e23977140a6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.961694] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 1723.961996] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.962772] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.962994] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.963463] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.963627] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.963781] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.963910] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1723.964094] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.989453] env[62619]: DEBUG oslo_vmware.api [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778120, 'name': PowerOnVM_Task, 'duration_secs': 1.018222} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.990040] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1723.990291] env[62619]: INFO nova.compute.manager [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Took 8.59 seconds to spawn the instance on the hypervisor. [ 1723.990479] env[62619]: DEBUG nova.compute.manager [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1723.991389] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e35da6-5b08-443e-8d93-f767400de5e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.197489] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778122, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.403141] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.519s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.403141] env[62619]: DEBUG nova.compute.manager [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1724.406138] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.946s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.406397] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.408563] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.945s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.408853] env[62619]: DEBUG nova.objects.instance [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lazy-loading 'resources' on Instance uuid 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1724.423359] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778121, 'name': ReconfigVM_Task, 'duration_secs': 0.70061} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.423895] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 7cb51b51-514d-4223-a82a-5cdbdab9482a/7cb51b51-514d-4223-a82a-5cdbdab9482a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1724.424707] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb3538a4-8bc6-42ee-9aa4-f08be95835db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.433606] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1724.433606] env[62619]: value = "task-1778123" [ 1724.433606] env[62619]: _type = "Task" [ 1724.433606] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.444351] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778123, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.453850] env[62619]: INFO nova.scheduler.client.report [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleted allocations for instance cbff225f-2d11-4a43-a320-95dd3afb8e48 [ 1724.468906] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.514453] env[62619]: INFO nova.compute.manager [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Took 20.59 seconds to build instance. [ 1724.612023] env[62619]: INFO nova.compute.manager [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Rebuilding instance [ 1724.653231] env[62619]: DEBUG nova.compute.manager [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1724.654490] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d647020d-0522-4d8c-8295-97d8d4fb50c2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.690993] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778122, 'name': CreateVM_Task, 'duration_secs': 0.54408} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.691178] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3101076-36d6-409a-8072-638107e63073] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1724.691789] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.691953] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.692541] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1724.692820] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e57811b-08ee-4720-89ba-e5f26006656b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.698042] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1724.698042] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523a01ab-7bed-9b86-a0f0-051544b15d51" [ 1724.698042] env[62619]: _type = "Task" [ 1724.698042] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.698791] env[62619]: DEBUG nova.network.neutron [req-58e6b9d4-a976-4bf1-8561-9e1df7a73e7c req-da036f2a-d252-4b80-99ee-6da96f3d587d service nova] [instance: a3101076-36d6-409a-8072-638107e63073] Updated VIF entry in instance network info cache for port d680f022-949d-41b7-b22b-7900ed9b02f9. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1724.699162] env[62619]: DEBUG nova.network.neutron [req-58e6b9d4-a976-4bf1-8561-9e1df7a73e7c req-da036f2a-d252-4b80-99ee-6da96f3d587d service nova] [instance: a3101076-36d6-409a-8072-638107e63073] Updating instance_info_cache with network_info: [{"id": "d680f022-949d-41b7-b22b-7900ed9b02f9", "address": "fa:16:3e:51:7a:fc", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd680f022-94", "ovs_interfaceid": "d680f022-949d-41b7-b22b-7900ed9b02f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1724.715625] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523a01ab-7bed-9b86-a0f0-051544b15d51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.716525] env[62619]: DEBUG nova.network.neutron [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Successfully updated port: 9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1724.789747] env[62619]: DEBUG nova.network.neutron [req-c32d1122-5b67-4eed-93fc-48c17771063a req-1545d661-0af9-4b20-a8c0-d176e4294b67 service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Updated VIF entry in instance network info cache for port ac59f3e2-0841-445b-8907-932fdd3f4d2c. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1724.790147] env[62619]: DEBUG nova.network.neutron [req-c32d1122-5b67-4eed-93fc-48c17771063a req-1545d661-0af9-4b20-a8c0-d176e4294b67 service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Updating instance_info_cache with network_info: [{"id": "ac59f3e2-0841-445b-8907-932fdd3f4d2c", "address": "fa:16:3e:11:2c:88", "network": {"id": "cc0cf77c-033d-4cc4-b7f8-b832204e998a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1661750215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4fe880c492946f397195ffc5bcd566e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac59f3e2-08", "ovs_interfaceid": "ac59f3e2-0841-445b-8907-932fdd3f4d2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1724.915792] env[62619]: DEBUG nova.compute.utils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1724.917810] env[62619]: DEBUG nova.objects.instance [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lazy-loading 'numa_topology' on Instance uuid 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1724.919288] env[62619]: DEBUG nova.compute.manager [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1724.919512] env[62619]: DEBUG nova.network.neutron [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1724.944978] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778123, 'name': Rename_Task, 'duration_secs': 0.174609} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.944978] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1724.946079] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea093a26-7e98-49b0-badb-cabb4b058478 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.954920] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1724.954920] env[62619]: value = "task-1778124" [ 1724.954920] env[62619]: _type = "Task" [ 1724.954920] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.965365] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778124, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.965819] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cdd7aca6-5325-4e3c-a5ff-63815aa7c524 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "cbff225f-2d11-4a43-a320-95dd3afb8e48" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.468s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.981242] env[62619]: DEBUG nova.policy [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1cbe4a8a89e44105969767acfcf8764d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bb5d393c514d41f78fd4ea45d2f888a8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1725.016733] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0dbc152c-94d5-42b6-966d-19dc0458c503 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.106s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.207118] env[62619]: DEBUG oslo_concurrency.lockutils [req-58e6b9d4-a976-4bf1-8561-9e1df7a73e7c req-da036f2a-d252-4b80-99ee-6da96f3d587d service nova] Releasing lock "refresh_cache-a3101076-36d6-409a-8072-638107e63073" {{(pid=62619) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.214454] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523a01ab-7bed-9b86-a0f0-051544b15d51, 'name': SearchDatastore_Task, 'duration_secs': 0.010795} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.214880] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.214880] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1725.215486] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.215486] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.215486] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1725.216962] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db1bd7a3-5d44-4bae-94e7-207effea53dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.223618] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "refresh_cache-8745aa7f-9848-4320-94b5-08b7e3bccf80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.223618] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "refresh_cache-8745aa7f-9848-4320-94b5-08b7e3bccf80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.223618] 
env[62619]: DEBUG nova.network.neutron [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1725.229248] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1725.229248] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1725.229248] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-366f21d1-6a4a-4b0a-b1e2-385bcf085082 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.241713] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1725.241713] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5288d546-f4f8-ab4b-04bd-c27bce6ede97" [ 1725.241713] env[62619]: _type = "Task" [ 1725.241713] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.252678] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5288d546-f4f8-ab4b-04bd-c27bce6ede97, 'name': SearchDatastore_Task, 'duration_secs': 0.010064} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.253391] env[62619]: DEBUG nova.network.neutron [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Successfully created port: 279a158f-38d3-41bb-ab72-22a80ceca030 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1725.256126] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e899f87-ba36-4aad-a1ca-464c858f71b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.262623] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1725.262623] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52abe8ae-6262-0d91-4b5f-ffbc5da8a491" [ 1725.262623] env[62619]: _type = "Task" [ 1725.262623] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.273789] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52abe8ae-6262-0d91-4b5f-ffbc5da8a491, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.293638] env[62619]: DEBUG oslo_concurrency.lockutils [req-c32d1122-5b67-4eed-93fc-48c17771063a req-1545d661-0af9-4b20-a8c0-d176e4294b67 service nova] Releasing lock "refresh_cache-f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.422391] env[62619]: DEBUG nova.compute.manager [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1725.424273] env[62619]: DEBUG nova.objects.base [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Object Instance<4cd6dafd-4f19-4d0f-8e07-8171a6a71e85> lazy-loaded attributes: resources,numa_topology {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1725.469139] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778124, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.676350] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1725.677040] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9e6f2dd-d3e6-4130-8800-5a028770b2e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.684088] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1725.684088] env[62619]: value = "task-1778125" [ 1725.684088] env[62619]: _type = "Task" [ 1725.684088] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.696073] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778125, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.728054] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35bb4b5e-fc1e-4179-81fb-8f10b63e1adb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.736383] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a229c8-b7ab-4cde-a527-e757172809fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.773162] env[62619]: DEBUG nova.network.neutron [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1725.779987] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15423e4-0e3f-43e5-9aa4-6d97c0e3d461 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.788833] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52abe8ae-6262-0d91-4b5f-ffbc5da8a491, 'name': SearchDatastore_Task, 'duration_secs': 0.01188} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.790893] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.791167] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a3101076-36d6-409a-8072-638107e63073/a3101076-36d6-409a-8072-638107e63073.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1725.791461] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58d32dd7-c33c-42c9-bfed-bfcbe0ff9c02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.794228] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61b5ca7-734f-42dc-834f-dd988bbfe3f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.811911] env[62619]: DEBUG nova.compute.provider_tree [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b 
{{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1725.817139] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1725.817139] env[62619]: value = "task-1778126" [ 1725.817139] env[62619]: _type = "Task" [ 1725.817139] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.827792] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778126, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.911130] env[62619]: DEBUG nova.compute.manager [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Received event network-vif-plugged-9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1725.911470] env[62619]: DEBUG oslo_concurrency.lockutils [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] Acquiring lock "8745aa7f-9848-4320-94b5-08b7e3bccf80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.911763] env[62619]: DEBUG oslo_concurrency.lockutils [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] Lock "8745aa7f-9848-4320-94b5-08b7e3bccf80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.912036] env[62619]: DEBUG oslo_concurrency.lockutils [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] Lock "8745aa7f-9848-4320-94b5-08b7e3bccf80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.912345] env[62619]: DEBUG nova.compute.manager [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] No waiting events found dispatching network-vif-plugged-9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1725.912600] env[62619]: WARNING nova.compute.manager [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Received unexpected event network-vif-plugged-9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01 for instance with vm_state building and task_state spawning. 
[ 1725.912845] env[62619]: DEBUG nova.compute.manager [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Received event network-changed-9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1725.913095] env[62619]: DEBUG nova.compute.manager [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Refreshing instance network info cache due to event network-changed-9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1725.913349] env[62619]: DEBUG oslo_concurrency.lockutils [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] Acquiring lock "refresh_cache-8745aa7f-9848-4320-94b5-08b7e3bccf80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.969513] env[62619]: DEBUG oslo_vmware.api [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778124, 'name': PowerOnVM_Task, 'duration_secs': 0.579193} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.969749] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1725.969950] env[62619]: INFO nova.compute.manager [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Took 8.09 seconds to spawn the instance on the hypervisor. 
[ 1725.970149] env[62619]: DEBUG nova.compute.manager [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1725.971143] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439445f2-b07c-46d9-8d40-1abd1bf926eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.006429] env[62619]: DEBUG nova.network.neutron [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Updating instance_info_cache with network_info: [{"id": "9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01", "address": "fa:16:3e:da:01:a7", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9abe90c7-65", "ovs_interfaceid": "9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.196757] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778125, 'name': PowerOffVM_Task, 'duration_secs': 0.283913} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.197052] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1726.258911] env[62619]: INFO nova.compute.manager [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Detaching volume b0427cd3-da5b-4bb2-96ed-03dc601c6f51 [ 1726.295817] env[62619]: INFO nova.virt.block_device [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Attempting to driver detach volume b0427cd3-da5b-4bb2-96ed-03dc601c6f51 from mountpoint /dev/sdb [ 1726.296098] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Volume detach. Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1726.296296] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369108', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'name': 'volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '11869077-b428-413f-9f8f-7eac08d2d9ec', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'serial': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1726.297218] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c9616f-4270-457e-a126-f45c41084d7f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.319866] env[62619]: DEBUG nova.scheduler.client.report [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1726.327107] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b90f62-fc26-4e50-bf65-40c3eb56902f 
{{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.337038] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778126, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47544} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.339053] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a3101076-36d6-409a-8072-638107e63073/a3101076-36d6-409a-8072-638107e63073.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1726.339271] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1726.340344] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca88ab78-bcde-48f2-9ba3-45eec6d3d379 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.342763] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5feca741-e07e-4c01-8101-f9cf119fbf59 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.367821] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86ed56f-da76-4c94-af12-b84c407d8581 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.370717] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1726.370717] env[62619]: value = "task-1778127" [ 1726.370717] env[62619]: _type = "Task" [ 1726.370717] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.386176] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] The volume has not been displaced from its original location: [datastore1] volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51/volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1726.391278] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Reconfiguring VM instance instance-0000004b to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1726.392033] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8fbdba0-7b33-4e7b-a609-659733f72cb7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.409403] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778127, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.415946] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1726.415946] env[62619]: value = "task-1778128" [ 1726.415946] env[62619]: _type = "Task" [ 1726.415946] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.426579] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778128, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.431902] env[62619]: DEBUG nova.compute.manager [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1726.460782] env[62619]: DEBUG nova.virt.hardware [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1726.461072] env[62619]: DEBUG nova.virt.hardware [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1726.461233] env[62619]: DEBUG nova.virt.hardware [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1726.461418] env[62619]: DEBUG nova.virt.hardware [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1726.461562] env[62619]: DEBUG nova.virt.hardware [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1726.461708] env[62619]: DEBUG nova.virt.hardware [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1726.461921] env[62619]: DEBUG nova.virt.hardware [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1726.462091] env[62619]: DEBUG nova.virt.hardware [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1726.462260] env[62619]: DEBUG nova.virt.hardware [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1726.462421] env[62619]: DEBUG nova.virt.hardware [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1726.462591] env[62619]: DEBUG nova.virt.hardware [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1726.463512] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d601f76e-c692-4578-b985-58eed68182f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.472505] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4abffb-73e9-4e23-aae4-51dddb0b168f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.496385] env[62619]: INFO nova.compute.manager [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Took 22.42 seconds to build instance. 
[ 1726.512030] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "refresh_cache-8745aa7f-9848-4320-94b5-08b7e3bccf80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1726.512030] env[62619]: DEBUG nova.compute.manager [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Instance network_info: |[{"id": "9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01", "address": "fa:16:3e:da:01:a7", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9abe90c7-65", "ovs_interfaceid": "9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1726.512030] env[62619]: DEBUG oslo_concurrency.lockutils [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] Acquired lock "refresh_cache-8745aa7f-9848-4320-94b5-08b7e3bccf80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1726.512030] env[62619]: DEBUG nova.network.neutron [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Refreshing network info cache for port 9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1726.512030] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:01:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe99da4f-5630-4afd-918b-b327193d8489', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1726.519480] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Creating folder: 
Project (bb5d393c514d41f78fd4ea45d2f888a8). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1726.523265] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf29555f-6769-4020-8494-ae257ca1959f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.537046] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Created folder: Project (bb5d393c514d41f78fd4ea45d2f888a8) in parent group-v368875. [ 1726.537046] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Creating folder: Instances. Parent ref: group-v369112. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1726.537046] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4be343c-d163-47e0-918a-2407e29513cd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.547825] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Created folder: Instances in parent group-v369112. [ 1726.548098] env[62619]: DEBUG oslo.service.loopingcall [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1726.548297] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1726.548638] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66b53949-b77f-4fc4-aa99-917caa72fea9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.572087] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1726.572087] env[62619]: value = "task-1778131" [ 1726.572087] env[62619]: _type = "Task" [ 1726.572087] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.580869] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778131, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.824977] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.416s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.829378] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.843s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.831303] env[62619]: INFO nova.compute.claims [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1726.855729] env[62619]: DEBUG nova.compute.manager [req-701e4b40-ed2b-45dc-984c-43f5330e3a50 req-657f9abf-96aa-4efe-aab0-09fa8d51a4ad service nova] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Received event network-vif-plugged-279a158f-38d3-41bb-ab72-22a80ceca030 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1726.855952] env[62619]: DEBUG oslo_concurrency.lockutils [req-701e4b40-ed2b-45dc-984c-43f5330e3a50 req-657f9abf-96aa-4efe-aab0-09fa8d51a4ad service nova] Acquiring lock "e302e431-1f95-4ab5-bfca-59450fd887f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.856219] env[62619]: DEBUG oslo_concurrency.lockutils [req-701e4b40-ed2b-45dc-984c-43f5330e3a50 req-657f9abf-96aa-4efe-aab0-09fa8d51a4ad service nova] Lock "e302e431-1f95-4ab5-bfca-59450fd887f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.856398] env[62619]: DEBUG oslo_concurrency.lockutils [req-701e4b40-ed2b-45dc-984c-43f5330e3a50 req-657f9abf-96aa-4efe-aab0-09fa8d51a4ad service nova] Lock "e302e431-1f95-4ab5-bfca-59450fd887f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.856589] env[62619]: DEBUG nova.compute.manager [req-701e4b40-ed2b-45dc-984c-43f5330e3a50 req-657f9abf-96aa-4efe-aab0-09fa8d51a4ad service nova] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] No waiting events found dispatching network-vif-plugged-279a158f-38d3-41bb-ab72-22a80ceca030 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1726.856788] env[62619]: WARNING nova.compute.manager [req-701e4b40-ed2b-45dc-984c-43f5330e3a50 req-657f9abf-96aa-4efe-aab0-09fa8d51a4ad service nova] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Received unexpected event network-vif-plugged-279a158f-38d3-41bb-ab72-22a80ceca030 for 
instance with vm_state building and task_state spawning. [ 1726.884570] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778127, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06784} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.885101] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1726.886216] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674e5f26-57b8-4cef-a16a-cc7f15f28f26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.916820] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] a3101076-36d6-409a-8072-638107e63073/a3101076-36d6-409a-8072-638107e63073.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1726.917944] env[62619]: DEBUG nova.network.neutron [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Updated VIF entry in instance network info cache for port 9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1726.918525] env[62619]: DEBUG nova.network.neutron [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Updating instance_info_cache with network_info: [{"id": "9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01", "address": "fa:16:3e:da:01:a7", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9abe90c7-65", "ovs_interfaceid": "9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.920272] env[62619]: DEBUG nova.network.neutron [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Successfully updated port: 279a158f-38d3-41bb-ab72-22a80ceca030 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1726.922057] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc056b72-d4ec-47c8-8abc-da256a587a01 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.937761] env[62619]: DEBUG oslo_concurrency.lockutils [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] Releasing lock "refresh_cache-8745aa7f-9848-4320-94b5-08b7e3bccf80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1726.938081] env[62619]: DEBUG nova.compute.manager [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Received event network-changed-5911524f-a8b5-4591-a312-ea0cefac24df {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1726.938354] env[62619]: DEBUG nova.compute.manager [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Refreshing instance network info cache due to event network-changed-5911524f-a8b5-4591-a312-ea0cefac24df. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1726.938685] env[62619]: DEBUG oslo_concurrency.lockutils [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] Acquiring lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1726.938776] env[62619]: DEBUG oslo_concurrency.lockutils [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] Acquired lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1726.938956] env[62619]: DEBUG nova.network.neutron [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Refreshing network info cache for port 5911524f-a8b5-4591-a312-ea0cefac24df {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1726.940481] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "refresh_cache-e302e431-1f95-4ab5-bfca-59450fd887f0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1726.940481] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "refresh_cache-e302e431-1f95-4ab5-bfca-59450fd887f0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1726.940644] env[62619]: DEBUG nova.network.neutron [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1726.956769] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778128, 'name': ReconfigVM_Task, 'duration_secs': 0.316892} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.957923] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Reconfigured VM instance instance-0000004b to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1726.962599] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1726.962599] env[62619]: value = "task-1778132" [ 1726.962599] env[62619]: _type = "Task" [ 1726.962599] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.963875] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f52a7a63-a0b8-48f4-9f67-595b44d2243d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.985325] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778132, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.986739] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1726.986739] env[62619]: value = "task-1778133" [ 1726.986739] env[62619]: _type = "Task" [ 1726.986739] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.995944] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778133, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.996779] env[62619]: DEBUG nova.network.neutron [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1726.999637] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f1c28bd-b2ba-4304-9745-32de530951b2 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "7cb51b51-514d-4223-a82a-5cdbdab9482a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.943s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.088579] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778131, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.210358] env[62619]: DEBUG nova.network.neutron [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Updating instance_info_cache with network_info: [{"id": "279a158f-38d3-41bb-ab72-22a80ceca030", "address": "fa:16:3e:a7:b3:9e", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap279a158f-38", "ovs_interfaceid": "279a158f-38d3-41bb-ab72-22a80ceca030", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.212354] env[62619]: DEBUG nova.network.neutron [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updated VIF entry in instance network info cache for port 5911524f-a8b5-4591-a312-ea0cefac24df. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1727.212696] env[62619]: DEBUG nova.network.neutron [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance_info_cache with network_info: [{"id": "5911524f-a8b5-4591-a312-ea0cefac24df", "address": "fa:16:3e:bd:c4:f9", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5911524f-a8", "ovs_interfaceid": "5911524f-a8b5-4591-a312-ea0cefac24df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.342823] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6e56276d-3b6f-433e-a298-185bf474ea15 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 44.098s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.343753] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 17.777s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.343948] env[62619]: INFO nova.compute.manager [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Unshelving [ 1727.484439] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778132, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.498192] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778133, 'name': ReconfigVM_Task, 'duration_secs': 0.372552} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.498516] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369108', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'name': 'volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '11869077-b428-413f-9f8f-7eac08d2d9ec', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'serial': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1727.584352] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778131, 'name': CreateVM_Task, 'duration_secs': 0.789493} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.584535] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1727.585313] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.585466] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.585793] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1727.586113] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8927cf6-3c97-45ea-b6d4-422e8825e2f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.591924] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1727.591924] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525b7abe-3c9b-e662-d3af-0ba02150d96e" [ 1727.591924] env[62619]: _type = "Task" [ 1727.591924] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.600831] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525b7abe-3c9b-e662-d3af-0ba02150d96e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.715385] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "refresh_cache-e302e431-1f95-4ab5-bfca-59450fd887f0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.715846] env[62619]: DEBUG nova.compute.manager [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Instance network_info: |[{"id": "279a158f-38d3-41bb-ab72-22a80ceca030", "address": "fa:16:3e:a7:b3:9e", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap279a158f-38", "ovs_interfaceid": "279a158f-38d3-41bb-ab72-22a80ceca030", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1727.716429] env[62619]: DEBUG oslo_concurrency.lockutils [req-eb509932-3eb1-4f51-9e4f-a092eb5abbb6 req-2fe0e726-d64b-4bb4-9406-a20e677fc929 service nova] Releasing lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.716923] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:b3:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe99da4f-5630-4afd-918b-b327193d8489', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '279a158f-38d3-41bb-ab72-22a80ceca030', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1727.724632] env[62619]: DEBUG 
oslo.service.loopingcall [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1727.724925] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1727.725201] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-190229e6-def1-44ca-9eb8-c1b96e8308c4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.746984] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1727.746984] env[62619]: value = "task-1778134" [ 1727.746984] env[62619]: _type = "Task" [ 1727.746984] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.757027] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778134, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.986021] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778132, 'name': ReconfigVM_Task, 'duration_secs': 1.013125} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.989437] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Reconfigured VM instance instance-00000056 to attach disk [datastore1] a3101076-36d6-409a-8072-638107e63073/a3101076-36d6-409a-8072-638107e63073.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1727.990665] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51e995bd-926a-47c9-abcc-d07f3181f0a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.999268] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1727.999268] env[62619]: value = "task-1778135" [ 1727.999268] env[62619]: _type = "Task" [ 1727.999268] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.015873] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778135, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.107650] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525b7abe-3c9b-e662-d3af-0ba02150d96e, 'name': SearchDatastore_Task, 'duration_secs': 0.016945} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.107650] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1728.107650] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1728.107650] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1728.108755] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1728.108755] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1728.112035] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2776b884-43ec-4669-a974-9f7df0f91d65 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.123115] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1728.126323] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1728.126323] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbef36c3-f04e-4ecb-b25d-d487f2fdefb2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.139662] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1728.139662] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ff35bc-5438-d9e6-c3ec-3ae486b8e804" [ 1728.139662] env[62619]: _type = "Task" [ 1728.139662] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.152273] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ff35bc-5438-d9e6-c3ec-3ae486b8e804, 'name': SearchDatastore_Task, 'duration_secs': 0.010415} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.153193] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66429869-8d9e-4eed-b379-aa1422c7064b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.160156] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1728.160156] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521627b6-c672-7bfa-4d23-b4529e759e57" [ 1728.160156] env[62619]: _type = "Task" [ 1728.160156] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.173929] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521627b6-c672-7bfa-4d23-b4529e759e57, 'name': SearchDatastore_Task, 'duration_secs': 0.011368} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.174376] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1728.174688] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8745aa7f-9848-4320-94b5-08b7e3bccf80/8745aa7f-9848-4320-94b5-08b7e3bccf80.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1728.174983] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28519725-0646-49c9-b24b-4ce0a96d503c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.183441] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1728.183441] env[62619]: value = "task-1778136" [ 1728.183441] env[62619]: _type = "Task" [ 1728.183441] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.193803] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778136, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.236908] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0cd6c1f-b51e-4602-a548-4ae7b6826815 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.244550] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-637af006-17ec-43a6-8424-4e6c22c37796 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.256012] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778134, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.280655] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040c3123-9f89-4bc4-821f-e6534d2fae44 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.288263] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5a730f-15bf-42eb-8653-d8119d27965e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.301983] env[62619]: DEBUG nova.compute.provider_tree [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1728.367318] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.512203] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778135, 'name': Rename_Task, 'duration_secs': 0.240721} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.512523] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1728.512723] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c3415a8-272e-461c-9590-5f244b96951f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.520467] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1728.520467] env[62619]: value = "task-1778137" [ 1728.520467] env[62619]: _type = "Task" [ 1728.520467] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.528362] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778137, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.559286] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1728.559658] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c3721df-4554-4cf0-a37c-cfc46ad0d832 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.568691] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1728.568691] env[62619]: value = "task-1778138" [ 1728.568691] env[62619]: _type = "Task" [ 1728.568691] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.579555] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1728.579780] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Volume detach. 
Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1728.579973] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369108', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'name': 'volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '11869077-b428-413f-9f8f-7eac08d2d9ec', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'serial': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1728.580765] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f73a56e-c1d4-4707-87be-7ac929de79f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.601858] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e5d29d-e7cb-4627-a0d0-4c7cbb8b9572 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.607274] env[62619]: DEBUG nova.compute.manager [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Stashing vm_state: active {{(pid=62619) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1728.612770] env[62619]: WARNING nova.virt.vmwareapi.driver [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1728.613093] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1728.614112] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af3bff0-04cd-4e6d-bdf9-9e3655c9ba84 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.622988] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1728.623281] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa8fd1ab-4154-45bd-a314-04ff9c3076a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.693773] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 
tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778136, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.759059] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778134, 'name': CreateVM_Task, 'duration_secs': 0.535179} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.760116] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1728.760512] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1728.760620] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1728.760794] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleting the datastore file [datastore1] 11869077-b428-413f-9f8f-7eac08d2d9ec {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1728.761498] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1728.761651] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1728.761960] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1728.762209] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b39afbd2-7897-404b-8e05-e57bd272df68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.764016] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-edf5ab51-8784-43d2-a794-3149c36882bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.770020] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1728.770020] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bf346c-a816-789c-ae12-434e5868fa54" [ 1728.770020] env[62619]: _type = "Task" [ 1728.770020] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.771358] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1728.771358] env[62619]: value = "task-1778140" [ 1728.771358] env[62619]: _type = "Task" [ 1728.771358] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.782876] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bf346c-a816-789c-ae12-434e5868fa54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.785976] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778140, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.805484] env[62619]: DEBUG nova.scheduler.client.report [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1728.887292] env[62619]: DEBUG nova.compute.manager [req-5f98750e-b394-41a9-94ae-d42811b73cc7 req-c64c6e01-d262-4df5-9011-30060c3f4724 service nova] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Received event network-changed-279a158f-38d3-41bb-ab72-22a80ceca030 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1728.887502] env[62619]: DEBUG nova.compute.manager [req-5f98750e-b394-41a9-94ae-d42811b73cc7 req-c64c6e01-d262-4df5-9011-30060c3f4724 service nova] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Refreshing instance network info cache due to event network-changed-279a158f-38d3-41bb-ab72-22a80ceca030. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1728.887757] env[62619]: DEBUG oslo_concurrency.lockutils [req-5f98750e-b394-41a9-94ae-d42811b73cc7 req-c64c6e01-d262-4df5-9011-30060c3f4724 service nova] Acquiring lock "refresh_cache-e302e431-1f95-4ab5-bfca-59450fd887f0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1728.887865] env[62619]: DEBUG oslo_concurrency.lockutils [req-5f98750e-b394-41a9-94ae-d42811b73cc7 req-c64c6e01-d262-4df5-9011-30060c3f4724 service nova] Acquired lock "refresh_cache-e302e431-1f95-4ab5-bfca-59450fd887f0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1728.888142] env[62619]: DEBUG nova.network.neutron [req-5f98750e-b394-41a9-94ae-d42811b73cc7 req-c64c6e01-d262-4df5-9011-30060c3f4724 service nova] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Refreshing network info cache for port 279a158f-38d3-41bb-ab72-22a80ceca030 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1729.034359] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778137, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.131515] env[62619]: DEBUG oslo_concurrency.lockutils [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.201212] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778136, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.286092] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bf346c-a816-789c-ae12-434e5868fa54, 'name': SearchDatastore_Task, 'duration_secs': 0.014447} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.289529] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1729.289769] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1729.290015] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1729.290169] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1729.290359] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1729.290656] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778140, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.264693} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.290864] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31bc8d68-b5e6-4a78-8e95-f4fd1bc9a51e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.292920] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1729.293133] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1729.293331] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1729.309275] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1729.309520] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1729.310824] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.311339] env[62619]: DEBUG nova.compute.manager [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1729.314067] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0801665b-d27e-4ce2-9868-189066e88bc7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.317518] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.858s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.317769] env[62619]: DEBUG nova.objects.instance [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lazy-loading 'resources' on Instance uuid a6ba8114-0261-4894-98c0-9e0360f6d256 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1729.323627] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1729.323627] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5265391c-b513-ea88-db90-6f39d952efbf" [ 1729.323627] env[62619]: _type = "Task" [ 1729.323627] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.334245] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5265391c-b513-ea88-db90-6f39d952efbf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.531375] env[62619]: DEBUG oslo_vmware.api [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778137, 'name': PowerOnVM_Task, 'duration_secs': 0.656911} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.531695] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1729.531818] env[62619]: INFO nova.compute.manager [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Took 8.17 seconds to spawn the instance on the hypervisor. 
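
Editor's note: the wait_for_task / _poll_task entries surrounding this point show the standard oslo.vmware task-polling loop: the driver invokes a vSphere method that returns a Task managed object, then blocks while the session polls it (the "progress is N%" lines at api.py:434) until it reaches a terminal state and logs "completed successfully" with duration_secs (api.py:444). Below is a minimal sketch of that pattern outside Nova; the vCenter host, credentials, datastore path and datacenter reference are placeholders rather than values from this log, and the constructor argument names are assumptions about oslo.vmware's VMwareAPISession.

# Minimal sketch of the task-polling pattern seen in the wait_for_task /
# _poll_task entries above. All concrete values are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org',        # placeholder vCenter host (assumption)
    'svc-user', 'secret',         # placeholder credentials
    api_retry_count=10,           # retries on transient faults
    task_poll_interval=0.5)       # seconds between "progress is N%" polls

# Start a server-side task (here: deleting a datastore file, mirroring the
# DeleteDatastoreFile_Task entries in the trace) and block until it finishes.
file_manager = session.vim.service_content.fileManager
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore1] example-dir/example.vmdk',  # placeholder path
    datacenter=None)                               # placeholder DC reference

# wait_for_task() polls the task (api.py:397/434 in the log) and returns the
# final task info once its state is successful, raising on failure.
task_info = session.wait_for_task(task)
print(task_info.state)
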
[ 1729.531992] env[62619]: DEBUG nova.compute.manager [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1729.532795] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a33c245-324c-4535-bc99-714c1f309bd8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.611287] env[62619]: DEBUG nova.network.neutron [req-5f98750e-b394-41a9-94ae-d42811b73cc7 req-c64c6e01-d262-4df5-9011-30060c3f4724 service nova] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Updated VIF entry in instance network info cache for port 279a158f-38d3-41bb-ab72-22a80ceca030. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1729.611678] env[62619]: DEBUG nova.network.neutron [req-5f98750e-b394-41a9-94ae-d42811b73cc7 req-c64c6e01-d262-4df5-9011-30060c3f4724 service nova] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Updating instance_info_cache with network_info: [{"id": "279a158f-38d3-41bb-ab72-22a80ceca030", "address": "fa:16:3e:a7:b3:9e", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap279a158f-38", "ovs_interfaceid": "279a158f-38d3-41bb-ab72-22a80ceca030", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1729.695838] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778136, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.291342} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.696115] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8745aa7f-9848-4320-94b5-08b7e3bccf80/8745aa7f-9848-4320-94b5-08b7e3bccf80.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1729.696345] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1729.696596] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-39bb310f-9a7e-44c2-8c32-96779a9d2a60 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.703206] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1729.703206] env[62619]: value = "task-1778141" [ 1729.703206] env[62619]: _type = "Task" [ 1729.703206] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.711269] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778141, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.799041] env[62619]: INFO nova.virt.block_device [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Booting with volume b0427cd3-da5b-4bb2-96ed-03dc601c6f51 at /dev/sdb [ 1729.819329] env[62619]: DEBUG nova.compute.utils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1729.820519] env[62619]: DEBUG nova.compute.manager [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1729.820670] env[62619]: DEBUG nova.network.neutron [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1729.835731] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5265391c-b513-ea88-db90-6f39d952efbf, 'name': SearchDatastore_Task, 'duration_secs': 0.053015} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.836552] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98334397-3c79-4694-b729-45d4716133bb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.840292] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bbb77230-8b76-410d-abf6-af9a24665ac4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.846730] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1729.846730] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527c42aa-dee6-5818-d619-d2091cd31804" [ 1729.846730] env[62619]: _type = "Task" [ 1729.846730] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.854357] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0dacda0-c64a-4194-8435-33ad31796d34 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.877234] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527c42aa-dee6-5818-d619-d2091cd31804, 'name': SearchDatastore_Task, 'duration_secs': 0.009979} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.880046] env[62619]: DEBUG nova.policy [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e77796b31f44a33905e00db18bba196', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '767e455ac6ef43d1b587e3e953ed8a9d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1729.881797] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1729.882056] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e302e431-1f95-4ab5-bfca-59450fd887f0/e302e431-1f95-4ab5-bfca-59450fd887f0.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1729.882414] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89164997-7faa-4eda-8152-ae3acb0d1316 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.905460] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e24a6be6-9c47-4895-927c-00051664cba7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.910476] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1729.910476] env[62619]: value = "task-1778142" [ 1729.910476] env[62619]: _type = "Task" [ 1729.910476] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.919512] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28caf0b5-4ea6-437e-b3c7-f6cf773a1f54 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.939281] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778142, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.967502] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3972230-e6a4-47dc-9d92-5683ca35df2a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.975886] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73ec29b-d275-46da-aef0-d30cbdf831d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.997313] env[62619]: DEBUG nova.virt.block_device [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Updating existing volume attachment record: c835947a-9cc6-432b-8a33-2cfe22dbcc5b {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1730.054022] env[62619]: INFO nova.compute.manager [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Took 24.82 seconds to build instance. [ 1730.114689] env[62619]: DEBUG oslo_concurrency.lockutils [req-5f98750e-b394-41a9-94ae-d42811b73cc7 req-c64c6e01-d262-4df5-9011-30060c3f4724 service nova] Releasing lock "refresh_cache-e302e431-1f95-4ab5-bfca-59450fd887f0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1730.218035] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778141, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.173151} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.218988] env[62619]: DEBUG nova.network.neutron [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Successfully created port: 2924458a-bf48-482f-ab31-ad34e83a94d4 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1730.221868] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1730.224464] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f9c2bb-f847-4925-b7f5-364e94beba8f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.230100] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fea352-45de-4df7-bb7f-504d375c0a0f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.250853] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab68395-b9c8-4590-97a2-c38d7d3a1ac3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.262613] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 8745aa7f-9848-4320-94b5-08b7e3bccf80/8745aa7f-9848-4320-94b5-08b7e3bccf80.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1730.262956] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20dd46ba-9046-47be-a6bb-f03cd932c11d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.309349] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ff9b77-b97a-4c4a-ba59-378ada159869 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.312880] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1730.312880] env[62619]: value = "task-1778143" [ 1730.312880] env[62619]: _type = "Task" [ 1730.312880] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.321685] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e80da2-d0f3-43ff-8583-1b1ee90aec0f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.330404] env[62619]: DEBUG nova.compute.manager [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1730.334083] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778143, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.349184] env[62619]: DEBUG nova.compute.provider_tree [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1730.425180] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778142, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.557802] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7464b5-d3bb-42c1-a547-5dd718f96cad tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "a3101076-36d6-409a-8072-638107e63073" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.341s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.824438] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778143, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.850389] env[62619]: DEBUG nova.scheduler.client.report [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1730.922063] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778142, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579726} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.922430] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e302e431-1f95-4ab5-bfca-59450fd887f0/e302e431-1f95-4ab5-bfca-59450fd887f0.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1730.922654] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1730.922906] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4d321d92-252b-41e5-934c-d061a3dd9039 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.929907] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1730.929907] env[62619]: value = "task-1778144" [ 1730.929907] env[62619]: _type = "Task" [ 1730.929907] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.938177] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778144, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.280955] env[62619]: DEBUG oslo_concurrency.lockutils [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "a3101076-36d6-409a-8072-638107e63073" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.281238] env[62619]: DEBUG oslo_concurrency.lockutils [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "a3101076-36d6-409a-8072-638107e63073" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.281442] env[62619]: DEBUG oslo_concurrency.lockutils [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "a3101076-36d6-409a-8072-638107e63073-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.281685] env[62619]: DEBUG oslo_concurrency.lockutils [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "a3101076-36d6-409a-8072-638107e63073-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.281797] env[62619]: DEBUG oslo_concurrency.lockutils [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "a3101076-36d6-409a-8072-638107e63073-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.284182] env[62619]: INFO nova.compute.manager [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Terminating instance [ 1731.325419] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778143, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.342272] env[62619]: DEBUG nova.compute.manager [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1731.355524] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.038s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.357949] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.752s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.359593] env[62619]: INFO nova.compute.claims [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1731.372278] env[62619]: DEBUG nova.virt.hardware [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1731.372545] env[62619]: DEBUG nova.virt.hardware [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1731.372668] env[62619]: DEBUG nova.virt.hardware [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1731.372811] env[62619]: DEBUG nova.virt.hardware [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1731.372953] env[62619]: DEBUG nova.virt.hardware [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 
tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1731.373110] env[62619]: DEBUG nova.virt.hardware [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1731.373314] env[62619]: DEBUG nova.virt.hardware [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1731.373733] env[62619]: DEBUG nova.virt.hardware [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1731.373733] env[62619]: DEBUG nova.virt.hardware [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1731.373879] env[62619]: DEBUG nova.virt.hardware [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1731.374060] env[62619]: DEBUG nova.virt.hardware [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1731.374944] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f4ed19-c377-4be8-8e26-fbc1b0dd20e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.378360] env[62619]: INFO nova.scheduler.client.report [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Deleted allocations for instance a6ba8114-0261-4894-98c0-9e0360f6d256 [ 1731.385206] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d4d93f-269a-40c6-826d-3a129e395c04 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.440463] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778144, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065288} completed 
successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.440734] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1731.441489] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb5d25d-796e-4d8e-99f8-c1b10f3417f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.466034] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] e302e431-1f95-4ab5-bfca-59450fd887f0/e302e431-1f95-4ab5-bfca-59450fd887f0.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1731.466222] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17746e69-e2f1-4eab-bbc0-f617ee629fcb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.486377] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1731.486377] env[62619]: value = "task-1778145" [ 1731.486377] env[62619]: _type = "Task" [ 1731.486377] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.496981] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778145, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.788888] env[62619]: DEBUG nova.compute.manager [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1731.789217] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1731.790304] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30026227-b51d-485e-ad01-8c8edc094fe5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.799275] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1731.799595] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df7b5bd6-5441-4d72-abbc-61448b0ac00b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.806692] env[62619]: DEBUG oslo_vmware.api [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1731.806692] env[62619]: value = "task-1778146" [ 1731.806692] env[62619]: _type = "Task" [ 1731.806692] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.816164] env[62619]: DEBUG oslo_vmware.api [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778146, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.825335] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778143, 'name': ReconfigVM_Task, 'duration_secs': 1.076958} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.825603] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 8745aa7f-9848-4320-94b5-08b7e3bccf80/8745aa7f-9848-4320-94b5-08b7e3bccf80.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1731.826291] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5db62dbc-a3a6-441e-b9ea-a6b2ced58bf2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.834145] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1731.834145] env[62619]: value = "task-1778147" [ 1731.834145] env[62619]: _type = "Task" [ 1731.834145] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.844050] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778147, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.889400] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e701e902-3def-4362-87d0-61644b48e895 tempest-ServersWithSpecificFlavorTestJSON-742862226 tempest-ServersWithSpecificFlavorTestJSON-742862226-project-member] Lock "a6ba8114-0261-4894-98c0-9e0360f6d256" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.561s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.997600] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778145, 'name': ReconfigVM_Task, 'duration_secs': 0.490802} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.997884] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Reconfigured VM instance instance-00000058 to attach disk [datastore1] e302e431-1f95-4ab5-bfca-59450fd887f0/e302e431-1f95-4ab5-bfca-59450fd887f0.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1731.998561] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a528aa4c-149c-4c05-a736-bf8e08d9170e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.006155] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1732.006155] env[62619]: value = "task-1778148" [ 1732.006155] env[62619]: _type = "Task" [ 1732.006155] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.017447] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778148, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.131881] env[62619]: DEBUG nova.virt.hardware [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1732.132161] env[62619]: DEBUG nova.virt.hardware [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1732.132317] env[62619]: DEBUG nova.virt.hardware [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1732.132496] env[62619]: DEBUG nova.virt.hardware [None req-3d405721-0b52-480d-bd07-5a995d0794a3 
tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1732.132639] env[62619]: DEBUG nova.virt.hardware [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1732.132783] env[62619]: DEBUG nova.virt.hardware [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1732.132982] env[62619]: DEBUG nova.virt.hardware [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1732.133160] env[62619]: DEBUG nova.virt.hardware [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1732.133328] env[62619]: DEBUG nova.virt.hardware [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1732.133497] env[62619]: DEBUG nova.virt.hardware [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1732.133654] env[62619]: DEBUG nova.virt.hardware [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1732.134574] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a6fc5b-9a56-4a38-a19a-63659eb81c47 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.144371] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5354000f-19c3-4e9f-9d7e-26b4d00ae8d0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.160557] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:be:5f', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8ee8640-3787-4c27-9581-962ddb2be7e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1732.168078] env[62619]: DEBUG oslo.service.loopingcall [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1732.169984] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1732.169984] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-718dd360-6a46-49ab-a6af-e3ae0ed377d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.185580] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.185580] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.185580] env[62619]: INFO nova.compute.manager [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Shelving [ 1732.194653] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1732.194653] env[62619]: value = "task-1778149" [ 1732.194653] env[62619]: _type = "Task" [ 1732.194653] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.195792] env[62619]: DEBUG nova.compute.manager [req-5beed5d7-d094-4adc-9e61-78f52408fcf9 req-596146ca-856e-49d1-8ef9-fe3b1824e813 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Received event network-vif-plugged-2924458a-bf48-482f-ab31-ad34e83a94d4 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1732.195988] env[62619]: DEBUG oslo_concurrency.lockutils [req-5beed5d7-d094-4adc-9e61-78f52408fcf9 req-596146ca-856e-49d1-8ef9-fe3b1824e813 service nova] Acquiring lock "32aed8cd-1583-4253-bfb6-a98610e2f32e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.196784] env[62619]: DEBUG oslo_concurrency.lockutils [req-5beed5d7-d094-4adc-9e61-78f52408fcf9 req-596146ca-856e-49d1-8ef9-fe3b1824e813 service nova] Lock "32aed8cd-1583-4253-bfb6-a98610e2f32e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.196957] env[62619]: DEBUG oslo_concurrency.lockutils [req-5beed5d7-d094-4adc-9e61-78f52408fcf9 req-596146ca-856e-49d1-8ef9-fe3b1824e813 service nova] Lock "32aed8cd-1583-4253-bfb6-a98610e2f32e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.197136] env[62619]: DEBUG nova.compute.manager [req-5beed5d7-d094-4adc-9e61-78f52408fcf9 req-596146ca-856e-49d1-8ef9-fe3b1824e813 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] No waiting events found dispatching network-vif-plugged-2924458a-bf48-482f-ab31-ad34e83a94d4 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1732.198231] env[62619]: WARNING nova.compute.manager [req-5beed5d7-d094-4adc-9e61-78f52408fcf9 req-596146ca-856e-49d1-8ef9-fe3b1824e813 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Received unexpected event network-vif-plugged-2924458a-bf48-482f-ab31-ad34e83a94d4 for instance with vm_state building and task_state spawning. [ 1732.210091] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778149, 'name': CreateVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.292929] env[62619]: DEBUG nova.network.neutron [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Successfully updated port: 2924458a-bf48-482f-ab31-ad34e83a94d4 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1732.322973] env[62619]: DEBUG oslo_vmware.api [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778146, 'name': PowerOffVM_Task, 'duration_secs': 0.219092} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.323384] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1732.323616] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1732.323917] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b426223a-4b69-4e47-8527-fc96ceaab309 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.348032] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778147, 'name': Rename_Task, 'duration_secs': 0.223033} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.348032] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1732.348032] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-232259b0-91b9-4c94-ad62-eb66c9c42475 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.351783] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1732.351783] env[62619]: value = "task-1778151" [ 1732.351783] env[62619]: _type = "Task" [ 1732.351783] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.360587] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778151, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.448956] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1732.449240] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1732.449430] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleting the datastore file [datastore1] a3101076-36d6-409a-8072-638107e63073 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1732.449711] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a233148c-06f0-4e3e-b191-4e076f45b560 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.456466] env[62619]: DEBUG oslo_vmware.api [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1732.456466] env[62619]: value = "task-1778152" [ 1732.456466] env[62619]: _type = "Task" [ 1732.456466] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.465126] env[62619]: DEBUG oslo_vmware.api [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778152, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.521027] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778148, 'name': Rename_Task, 'duration_secs': 0.163005} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.521325] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1732.521653] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7f83f26-37ce-4884-94f0-531aeb9da71d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.529854] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1732.529854] env[62619]: value = "task-1778153" [ 1732.529854] env[62619]: _type = "Task" [ 1732.529854] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.545409] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778153, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.677936] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7e4d18-3917-434b-bf89-c90c1b1f4441 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.687253] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e918f643-bcbb-4e3e-93dc-801a8efe035a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.729834] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6bce5f-fcce-4b44-8c1d-ccf3eac74f65 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.738632] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778149, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.741736] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715596dd-8213-4845-b8ec-7d46d4e8f815 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.756749] env[62619]: DEBUG nova.compute.provider_tree [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1732.795816] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquiring lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1732.796097] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquired lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1732.796220] env[62619]: DEBUG nova.network.neutron [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1732.862755] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778151, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.970250] env[62619]: DEBUG oslo_vmware.api [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778152, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.233985} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.970604] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1732.970861] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1732.971106] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1732.971348] env[62619]: INFO nova.compute.manager [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: a3101076-36d6-409a-8072-638107e63073] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1732.971677] env[62619]: DEBUG oslo.service.loopingcall [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1732.971927] env[62619]: DEBUG nova.compute.manager [-] [instance: a3101076-36d6-409a-8072-638107e63073] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1732.972062] env[62619]: DEBUG nova.network.neutron [-] [instance: a3101076-36d6-409a-8072-638107e63073] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1733.045253] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778153, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.229157] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1733.229476] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3577566e-3e47-4cbd-bd6b-60687e3e6b92 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.239018] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778149, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.240607] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1733.240607] env[62619]: value = "task-1778154" [ 1733.240607] env[62619]: _type = "Task" [ 1733.240607] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.252713] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778154, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.259882] env[62619]: DEBUG nova.scheduler.client.report [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1733.339412] env[62619]: DEBUG nova.network.neutron [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1733.365823] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778151, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.544759] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778153, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.583031] env[62619]: DEBUG nova.network.neutron [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Updating instance_info_cache with network_info: [{"id": "2924458a-bf48-482f-ab31-ad34e83a94d4", "address": "fa:16:3e:c5:5c:af", "network": {"id": "c853257c-5523-4c7c-ac39-b96dd377e1fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1460025320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767e455ac6ef43d1b587e3e953ed8a9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2924458a-bf", "ovs_interfaceid": "2924458a-bf48-482f-ab31-ad34e83a94d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.735892] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778149, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.751226] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778154, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.765264] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1733.766100] env[62619]: DEBUG nova.compute.manager [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1733.769296] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.692s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.770723] env[62619]: INFO nova.compute.claims [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1733.778537] env[62619]: DEBUG nova.network.neutron [-] [instance: a3101076-36d6-409a-8072-638107e63073] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.867449] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778151, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.045364] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778153, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.088682] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Releasing lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.089839] env[62619]: DEBUG nova.compute.manager [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Instance network_info: |[{"id": "2924458a-bf48-482f-ab31-ad34e83a94d4", "address": "fa:16:3e:c5:5c:af", "network": {"id": "c853257c-5523-4c7c-ac39-b96dd377e1fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1460025320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767e455ac6ef43d1b587e3e953ed8a9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2924458a-bf", "ovs_interfaceid": 
"2924458a-bf48-482f-ab31-ad34e83a94d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1734.093369] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:5c:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2924458a-bf48-482f-ab31-ad34e83a94d4', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1734.101303] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Creating folder: Project (767e455ac6ef43d1b587e3e953ed8a9d). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1734.101303] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d8ffcc9-45d2-4ad9-9c8f-e73e7af2a37b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.117208] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Created folder: Project (767e455ac6ef43d1b587e3e953ed8a9d) in parent group-v368875. [ 1734.117452] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Creating folder: Instances. Parent ref: group-v369117. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1734.117729] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-830886aa-be49-4a7e-a366-63a58895962c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.131723] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Created folder: Instances in parent group-v369117. [ 1734.132158] env[62619]: DEBUG oslo.service.loopingcall [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1734.132428] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1734.133124] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-61b3e0dd-f8b6-4d29-8020-021f3b839f24 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.160447] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1734.160447] env[62619]: value = "task-1778157" [ 1734.160447] env[62619]: _type = "Task" [ 1734.160447] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.173172] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778157, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.237545] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778149, 'name': CreateVM_Task, 'duration_secs': 1.575531} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.237726] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1734.238434] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.240112] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.240112] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1734.240112] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-561fbaef-2165-4c6d-9476-adc6bb972fe6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.246081] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1734.246081] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52372326-0361-607d-45ee-058b352b4151" [ 1734.246081] env[62619]: _type = "Task" [ 1734.246081] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.254906] env[62619]: DEBUG nova.compute.manager [req-9a52c445-08e2-431a-bc90-9a12b762606d req-f1fcddb5-260e-4e3a-bdf2-5efd73ac2b07 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Received event network-changed-2924458a-bf48-482f-ab31-ad34e83a94d4 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1734.255126] env[62619]: DEBUG nova.compute.manager [req-9a52c445-08e2-431a-bc90-9a12b762606d req-f1fcddb5-260e-4e3a-bdf2-5efd73ac2b07 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Refreshing instance network info cache due to event network-changed-2924458a-bf48-482f-ab31-ad34e83a94d4. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1734.255683] env[62619]: DEBUG oslo_concurrency.lockutils [req-9a52c445-08e2-431a-bc90-9a12b762606d req-f1fcddb5-260e-4e3a-bdf2-5efd73ac2b07 service nova] Acquiring lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.255683] env[62619]: DEBUG oslo_concurrency.lockutils [req-9a52c445-08e2-431a-bc90-9a12b762606d req-f1fcddb5-260e-4e3a-bdf2-5efd73ac2b07 service nova] Acquired lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.255683] env[62619]: DEBUG nova.network.neutron [req-9a52c445-08e2-431a-bc90-9a12b762606d req-f1fcddb5-260e-4e3a-bdf2-5efd73ac2b07 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Refreshing network info cache for port 2924458a-bf48-482f-ab31-ad34e83a94d4 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1734.267634] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778154, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.275754] env[62619]: DEBUG nova.compute.utils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1734.280389] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52372326-0361-607d-45ee-058b352b4151, 'name': SearchDatastore_Task, 'duration_secs': 0.011883} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.280533] env[62619]: DEBUG nova.compute.manager [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1734.280716] env[62619]: DEBUG nova.network.neutron [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1734.283862] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.284134] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1734.284437] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.284649] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.284786] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1734.285826] env[62619]: INFO nova.compute.manager [-] [instance: a3101076-36d6-409a-8072-638107e63073] Took 1.31 seconds to deallocate network for instance. [ 1734.285826] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7ddc5d5-30b4-4da9-9fdd-f54e151c2664 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.303229] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1734.303488] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1734.304745] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb6ab7f6-ad7b-4a0c-afe8-9a2fec9fffd0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.312184] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1734.312184] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527ee1df-ba91-a5da-38dc-17b85185ac14" [ 1734.312184] env[62619]: _type = "Task" [ 1734.312184] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.322475] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527ee1df-ba91-a5da-38dc-17b85185ac14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.364989] env[62619]: DEBUG oslo_vmware.api [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778151, 'name': PowerOnVM_Task, 'duration_secs': 1.875163} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.365317] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1734.365518] env[62619]: INFO nova.compute.manager [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Took 10.46 seconds to spawn the instance on the hypervisor. 
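The records above (task-1778148 through task-1778153) all follow the same oslo.vmware task-handling pattern that the wait_for_task/_poll_task frames cited in these lines come from: the SOAP call returns a vSphere task object, and the caller blocks while the task's progress is polled until it reports success or error. The snippet below is only an illustrative, self-contained sketch of that polling loop, written against a hypothetical get_task_info() helper; it is not the real oslo_vmware.api.VMwareAPISession.wait_for_task implementation that produces the "progress is N%" and "completed successfully" records seen here.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls (illustrative value only)


    class TaskFailed(Exception):
        """Raised when the polled task finishes in an error state."""


    def wait_for_task_sketch(get_task_info, task_id):
        """Poll a task until it completes, mirroring the log pattern above.

        get_task_info is a hypothetical callable returning a dict such as
        {'state': 'running', 'progress': 78}, {'state': 'success'} or
        {'state': 'error', 'message': '...'}; it stands in for the
        PropertyCollector round-trips the real driver performs.
        """
        while True:
            info = get_task_info(task_id)
            state = info.get('state')
            if state == 'success':
                # Corresponds to the "... completed successfully." records.
                return info
            if state == 'error':
                raise TaskFailed(info.get('message', 'unknown error'))
            # Corresponds to the periodic "Task: {...} progress is N%." records.
            print("Task %s progress is %s%%" % (task_id, info.get('progress', 0)))
            time.sleep(POLL_INTERVAL)

In the driver itself this polling lives in oslo_vmware/api.py (the wait_for_task and _poll_task frames quoted throughout these records), so callers such as Rename_Task, CreateVM_Task and PowerOnVM_Task above never hand-roll a loop like this.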
[ 1734.365693] env[62619]: DEBUG nova.compute.manager [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1734.367223] env[62619]: DEBUG nova.policy [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81d9019dac6940728d55a47a730a3419', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1c81be375fe34117838fcda4608d4091', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1734.369331] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3376e4a8-62e2-42d9-bf26-26fd9572ac7f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.547919] env[62619]: DEBUG oslo_vmware.api [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778153, 'name': PowerOnVM_Task, 'duration_secs': 1.750232} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.548226] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1734.548432] env[62619]: INFO nova.compute.manager [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Took 8.12 seconds to spawn the instance on the hypervisor. [ 1734.548608] env[62619]: DEBUG nova.compute.manager [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1734.549833] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bfbe4c7-949b-4571-a16c-cdf73e0f5469 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.674928] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778157, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.754429] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778154, 'name': PowerOffVM_Task, 'duration_secs': 1.125462} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.755175] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1734.756725] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278b718f-f9f9-450b-875f-cee1985be089 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.777537] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c373caee-8de2-4de5-a250-a78ad542a6ad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.780845] env[62619]: DEBUG nova.compute.manager [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1734.796505] env[62619]: DEBUG oslo_concurrency.lockutils [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.827142] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527ee1df-ba91-a5da-38dc-17b85185ac14, 'name': SearchDatastore_Task, 'duration_secs': 0.009837} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.831798] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d3f73d0-8131-4be3-aa89-b0b06783e886 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.840303] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1734.840303] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529c133b-84b8-ea59-a0f9-196def7cf17e" [ 1734.840303] env[62619]: _type = "Task" [ 1734.840303] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.848812] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529c133b-84b8-ea59-a0f9-196def7cf17e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.888598] env[62619]: INFO nova.compute.manager [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Took 28.80 seconds to build instance. [ 1734.995359] env[62619]: DEBUG nova.network.neutron [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Successfully created port: d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1735.072038] env[62619]: INFO nova.compute.manager [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Took 28.47 seconds to build instance. [ 1735.146100] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da2a225-9888-4744-a11e-0657b7e3159e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.154669] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737401e6-5a74-4d9e-9db0-9cdd1e873f1c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.199513] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b347b0db-eded-4fda-91d6-5eae92525a6c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.212092] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e230fc8f-0b9a-41ec-98e9-e1636a20f0dd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.216088] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778157, 'name': CreateVM_Task, 'duration_secs': 0.602827} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.216421] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1735.217521] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1735.217632] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.217881] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1735.218154] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee6ea2b5-ebd3-4122-9698-9afa02d8a9df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.232641] env[62619]: DEBUG nova.compute.provider_tree [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1735.238280] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Waiting for the task: (returnval){ [ 1735.238280] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5252404c-d939-5570-f488-94d1d38a5986" [ 1735.238280] env[62619]: _type = "Task" [ 1735.238280] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.252027] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5252404c-d939-5570-f488-94d1d38a5986, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.298505] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1735.298505] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0325b77b-2922-436c-a830-4870d4406347 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.307051] env[62619]: DEBUG nova.network.neutron [req-9a52c445-08e2-431a-bc90-9a12b762606d req-f1fcddb5-260e-4e3a-bdf2-5efd73ac2b07 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Updated VIF entry in instance network info cache for port 2924458a-bf48-482f-ab31-ad34e83a94d4. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1735.307823] env[62619]: DEBUG nova.network.neutron [req-9a52c445-08e2-431a-bc90-9a12b762606d req-f1fcddb5-260e-4e3a-bdf2-5efd73ac2b07 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Updating instance_info_cache with network_info: [{"id": "2924458a-bf48-482f-ab31-ad34e83a94d4", "address": "fa:16:3e:c5:5c:af", "network": {"id": "c853257c-5523-4c7c-ac39-b96dd377e1fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1460025320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767e455ac6ef43d1b587e3e953ed8a9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2924458a-bf", "ovs_interfaceid": "2924458a-bf48-482f-ab31-ad34e83a94d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1735.308996] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1735.308996] env[62619]: value = "task-1778158" [ 1735.308996] env[62619]: _type = "Task" [ 1735.308996] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.319516] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778158, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.354417] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529c133b-84b8-ea59-a0f9-196def7cf17e, 'name': SearchDatastore_Task, 'duration_secs': 0.012354} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.354417] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.354417] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 11869077-b428-413f-9f8f-7eac08d2d9ec/11869077-b428-413f-9f8f-7eac08d2d9ec.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1735.354632] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45b8df29-c5a6-4b25-8582-e132c1d0be7a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.364504] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1735.364504] env[62619]: value = "task-1778159" [ 1735.364504] env[62619]: _type = "Task" [ 1735.364504] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.373522] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778159, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.393548] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc7b4517-a226-4c29-8e18-1fd6c2d5f5e9 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "8745aa7f-9848-4320-94b5-08b7e3bccf80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.318s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.573719] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38e1a801-a436-4fa9-9b73-156b4dd620d7 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "e302e431-1f95-4ab5-bfca-59450fd887f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.985s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.736202] env[62619]: DEBUG nova.scheduler.client.report [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1735.754913] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5252404c-d939-5570-f488-94d1d38a5986, 'name': SearchDatastore_Task, 'duration_secs': 0.014263} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.756101] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.756235] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1735.758114] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1735.758114] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.758114] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1735.758114] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76ebb930-5f5a-46cf-9ddc-4cd9305b5972 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.774907] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1735.775151] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1735.775986] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47bc41ba-764c-44c8-a85f-cd4415232497 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.785628] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Waiting for the task: (returnval){ [ 1735.785628] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52db8566-665b-10ac-4db6-6d298316f4f2" [ 1735.785628] env[62619]: _type = "Task" [ 1735.785628] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.790634] env[62619]: DEBUG nova.compute.manager [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1735.795235] env[62619]: INFO nova.compute.manager [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Rescuing [ 1735.795235] env[62619]: DEBUG oslo_concurrency.lockutils [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "refresh_cache-e302e431-1f95-4ab5-bfca-59450fd887f0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1735.795235] env[62619]: DEBUG oslo_concurrency.lockutils [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "refresh_cache-e302e431-1f95-4ab5-bfca-59450fd887f0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.795614] env[62619]: DEBUG nova.network.neutron [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1735.800621] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52db8566-665b-10ac-4db6-6d298316f4f2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.821822] env[62619]: DEBUG oslo_concurrency.lockutils [req-9a52c445-08e2-431a-bc90-9a12b762606d req-f1fcddb5-260e-4e3a-bdf2-5efd73ac2b07 service nova] Releasing lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.821822] env[62619]: DEBUG nova.compute.manager [req-9a52c445-08e2-431a-bc90-9a12b762606d req-f1fcddb5-260e-4e3a-bdf2-5efd73ac2b07 service nova] [instance: a3101076-36d6-409a-8072-638107e63073] Received event network-vif-deleted-d680f022-949d-41b7-b22b-7900ed9b02f9 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1735.845163] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778158, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.866841] env[62619]: DEBUG nova.virt.hardware [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1735.868141] env[62619]: DEBUG nova.virt.hardware [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1735.868141] env[62619]: DEBUG nova.virt.hardware [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1735.868141] env[62619]: DEBUG nova.virt.hardware [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1735.868141] env[62619]: DEBUG nova.virt.hardware [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1735.868141] env[62619]: DEBUG nova.virt.hardware [None 
req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1735.869066] env[62619]: DEBUG nova.virt.hardware [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1735.869066] env[62619]: DEBUG nova.virt.hardware [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1735.869066] env[62619]: DEBUG nova.virt.hardware [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1735.869066] env[62619]: DEBUG nova.virt.hardware [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1735.869066] env[62619]: DEBUG nova.virt.hardware [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1735.870162] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85163076-be55-47d6-9256-5c48e480d0ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.895059] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778159, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.905364] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56682d66-9168-4fdc-8fdd-8256b19f15c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.246540] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.247163] env[62619]: DEBUG nova.compute.manager [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1736.253143] env[62619]: DEBUG oslo_concurrency.lockutils [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.309s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.253378] env[62619]: DEBUG nova.objects.instance [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lazy-loading 'resources' on Instance uuid 34180944-02f7-4115-8178-64f2f2591080 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1736.304255] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52db8566-665b-10ac-4db6-6d298316f4f2, 'name': SearchDatastore_Task, 'duration_secs': 0.06039} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.306774] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b086bcb-87b4-4ff9-835b-aac97cf95452 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.317485] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Waiting for the task: (returnval){ [ 1736.317485] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e5d1f2-9f21-6790-b514-6d5a38993b60" [ 1736.317485] env[62619]: _type = "Task" [ 1736.317485] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.326585] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e5d1f2-9f21-6790-b514-6d5a38993b60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.332959] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778158, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.381357] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778159, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.554808} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.381622] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 11869077-b428-413f-9f8f-7eac08d2d9ec/11869077-b428-413f-9f8f-7eac08d2d9ec.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1736.381844] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1736.382110] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25431bd4-7e4f-42c6-94a7-a8a8af980c82 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.391978] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1736.391978] env[62619]: value = "task-1778160" [ 1736.391978] env[62619]: _type = "Task" [ 1736.391978] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.404068] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778160, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.547327] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Acquiring lock "b1c3c213-599d-4cab-8224-d87467d774c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.547695] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Lock "b1c3c213-599d-4cab-8224-d87467d774c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.547923] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Acquiring lock "b1c3c213-599d-4cab-8224-d87467d774c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.548207] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Lock "b1c3c213-599d-4cab-8224-d87467d774c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.548380] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Lock "b1c3c213-599d-4cab-8224-d87467d774c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.551728] env[62619]: INFO nova.compute.manager [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Terminating instance [ 1736.623722] env[62619]: DEBUG nova.network.neutron [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Updating instance_info_cache with network_info: [{"id": "279a158f-38d3-41bb-ab72-22a80ceca030", "address": "fa:16:3e:a7:b3:9e", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap279a158f-38", "ovs_interfaceid": "279a158f-38d3-41bb-ab72-22a80ceca030", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.660796] env[62619]: DEBUG nova.compute.manager [req-07efd8ab-d248-47b4-8ac4-2a2e8f2d087d req-22191e16-6554-4e6c-9f24-3893b1caa8f0 service nova] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Received event network-vif-plugged-d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1736.661403] env[62619]: DEBUG oslo_concurrency.lockutils [req-07efd8ab-d248-47b4-8ac4-2a2e8f2d087d req-22191e16-6554-4e6c-9f24-3893b1caa8f0 service nova] Acquiring lock "f3345332-5a22-4a1c-ac74-4e8f2ceb3f15-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.661403] env[62619]: DEBUG oslo_concurrency.lockutils [req-07efd8ab-d248-47b4-8ac4-2a2e8f2d087d req-22191e16-6554-4e6c-9f24-3893b1caa8f0 service nova] Lock "f3345332-5a22-4a1c-ac74-4e8f2ceb3f15-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.661403] env[62619]: DEBUG oslo_concurrency.lockutils [req-07efd8ab-d248-47b4-8ac4-2a2e8f2d087d req-22191e16-6554-4e6c-9f24-3893b1caa8f0 service nova] Lock "f3345332-5a22-4a1c-ac74-4e8f2ceb3f15-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.661619] env[62619]: DEBUG nova.compute.manager [req-07efd8ab-d248-47b4-8ac4-2a2e8f2d087d req-22191e16-6554-4e6c-9f24-3893b1caa8f0 service nova] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] No waiting events found dispatching network-vif-plugged-d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1736.661707] env[62619]: WARNING nova.compute.manager [req-07efd8ab-d248-47b4-8ac4-2a2e8f2d087d req-22191e16-6554-4e6c-9f24-3893b1caa8f0 service nova] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Received unexpected event network-vif-plugged-d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09 for instance with vm_state building and task_state spawning. 
[ 1736.746910] env[62619]: DEBUG nova.network.neutron [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Successfully updated port: d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1736.760888] env[62619]: DEBUG nova.compute.utils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1736.763136] env[62619]: DEBUG nova.compute.manager [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1736.763136] env[62619]: DEBUG nova.network.neutron [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1736.815176] env[62619]: DEBUG nova.policy [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15bd98bac83146b0899cc8776fece70e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '079ddd8f5dc14fa699b4961995733f95', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1736.834734] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778158, 'name': CreateSnapshot_Task, 'duration_secs': 1.196629} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.837895] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1736.838199] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e5d1f2-9f21-6790-b514-6d5a38993b60, 'name': SearchDatastore_Task, 'duration_secs': 0.011137} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.840981] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19c744f-1721-4479-8329-9598f4e0ddff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.843383] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1736.843655] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 32aed8cd-1583-4253-bfb6-a98610e2f32e/32aed8cd-1583-4253-bfb6-a98610e2f32e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1736.844079] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-046dd6a6-5be9-4053-a004-18ecf6b85911 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.856099] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Waiting for the task: (returnval){ [ 1736.856099] env[62619]: value = "task-1778161" [ 1736.856099] env[62619]: _type = "Task" [ 1736.856099] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.865702] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778161, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.904575] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778160, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.134978} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.904575] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1736.904868] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa84e32d-2867-49dc-bcc5-6538d5709b3f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.936028] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 11869077-b428-413f-9f8f-7eac08d2d9ec/11869077-b428-413f-9f8f-7eac08d2d9ec.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1736.939448] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea68d610-ece7-4213-9326-25ea47647adc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.963103] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1736.963103] env[62619]: value = "task-1778162" [ 1736.963103] env[62619]: _type = "Task" [ 1736.963103] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.981926] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778162, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.059449] env[62619]: DEBUG nova.compute.manager [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1737.059689] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1737.059965] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-340b7318-7428-4525-97b5-469ec4cfcc1d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.068792] env[62619]: DEBUG oslo_vmware.api [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Waiting for the task: (returnval){ [ 1737.068792] env[62619]: value = "task-1778163" [ 1737.068792] env[62619]: _type = "Task" [ 1737.068792] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.084287] env[62619]: DEBUG oslo_vmware.api [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1778163, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.126180] env[62619]: DEBUG oslo_concurrency.lockutils [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "refresh_cache-e302e431-1f95-4ab5-bfca-59450fd887f0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.136718] env[62619]: DEBUG nova.network.neutron [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Successfully created port: 78e09ad3-db58-4374-9332-13a1881ffdfd {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1737.197094] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049da268-2605-46ae-b3e1-8c6f69a5e8c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.210841] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e018c284-2897-4623-8f0e-9505132351ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.248569] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe27980-a9b3-4651-9b7d-70e17c282d98 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.252149] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Acquiring lock "refresh_cache-f3345332-5a22-4a1c-ac74-4e8f2ceb3f15" {{(pid=62619) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.252280] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Acquired lock "refresh_cache-f3345332-5a22-4a1c-ac74-4e8f2ceb3f15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.252459] env[62619]: DEBUG nova.network.neutron [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1737.263714] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51648fc4-e930-4fa0-ae81-9d708ec288ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.267123] env[62619]: DEBUG nova.compute.manager [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1737.282929] env[62619]: DEBUG nova.compute.provider_tree [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1737.365502] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1737.365965] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d2428707-0ea3-4b46-9904-baf6f8c1a15d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.385748] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778161, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.387309] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1737.387309] env[62619]: value = "task-1778164" [ 1737.387309] env[62619]: _type = "Task" [ 1737.387309] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.397954] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778164, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.478152] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.579759] env[62619]: DEBUG oslo_vmware.api [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1778163, 'name': PowerOffVM_Task, 'duration_secs': 0.269714} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.580133] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1737.580395] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Volume detach. 
Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1737.580680] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369005', 'volume_id': 'da513efd-d6e1-4db5-90d2-52a5be9aa233', 'name': 'volume-da513efd-d6e1-4db5-90d2-52a5be9aa233', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b1c3c213-599d-4cab-8224-d87467d774c9', 'attached_at': '', 'detached_at': '', 'volume_id': 'da513efd-d6e1-4db5-90d2-52a5be9aa233', 'serial': 'da513efd-d6e1-4db5-90d2-52a5be9aa233'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1737.581535] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd15a2d-91a7-475b-95bf-365db7991edd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.603609] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a212d9d-0994-41cc-ba5a-ec7d3be28a34 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.611165] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f68113-d95b-4633-89ed-b49015691439 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.632074] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a71db4-9405-4acc-807d-0f0dffb52678 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.648820] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] The volume has not been displaced from its original location: [datastore1] volume-da513efd-d6e1-4db5-90d2-52a5be9aa233/volume-da513efd-d6e1-4db5-90d2-52a5be9aa233.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1737.654111] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Reconfiguring VM instance instance-00000039 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1737.654467] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-078380ad-adf1-4ce6-935f-7d5c6e77d14d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.675414] env[62619]: DEBUG oslo_vmware.api [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Waiting for the task: (returnval){ [ 1737.675414] env[62619]: value = "task-1778165" [ 1737.675414] env[62619]: _type = "Task" [ 1737.675414] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.686491] env[62619]: DEBUG oslo_vmware.api [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1778165, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.786466] env[62619]: DEBUG nova.scheduler.client.report [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1737.817767] env[62619]: DEBUG nova.network.neutron [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1737.875849] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778161, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594481} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.878625] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 32aed8cd-1583-4253-bfb6-a98610e2f32e/32aed8cd-1583-4253-bfb6-a98610e2f32e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1737.878888] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1737.879215] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca141df1-b37a-4bc1-8df8-7ae2bc769c9a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.888113] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Waiting for the task: (returnval){ [ 1737.888113] env[62619]: value = "task-1778166" [ 1737.888113] env[62619]: _type = "Task" [ 1737.888113] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.903802] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778164, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.906962] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778166, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.974069] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778162, 'name': ReconfigVM_Task, 'duration_secs': 0.654743} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.974442] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 11869077-b428-413f-9f8f-7eac08d2d9ec/11869077-b428-413f-9f8f-7eac08d2d9ec.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1737.975822] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'disk_bus': None, 'encryption_secret_uuid': None, 'boot_index': 0, 'size': 0, 'guest_format': None, 'device_type': 'disk', 'encryption_options': None, 'device_name': '/dev/sda', 'encryption_format': None, 'encrypted': False, 'image_id': '27a858d5-7985-4b17-8b01-50adcd8f566c'}], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'boot_index': None, 'guest_format': None, 'mount_device': '/dev/sdb', 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369108', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'name': 'volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '11869077-b428-413f-9f8f-7eac08d2d9ec', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'serial': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51'}, 'attachment_id': 'c835947a-9cc6-432b-8a33-2cfe22dbcc5b', 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=62619) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1737.976076] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Volume attach. 
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1737.976257] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369108', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'name': 'volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '11869077-b428-413f-9f8f-7eac08d2d9ec', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'serial': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1737.977058] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2045b7-8c41-4132-89e3-73f63d775fb4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.995411] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3326345-d28d-4391-9ac4-9978ff63f1d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.020757] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51/volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1738.021056] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d572c76b-e710-4f96-b5d3-f165347c5a71 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.034673] env[62619]: DEBUG nova.network.neutron [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Updating instance_info_cache with network_info: [{"id": "d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09", "address": "fa:16:3e:e7:72:57", "network": {"id": "93cb677b-0f18-4676-a548-7174b284325e", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-819439326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c81be375fe34117838fcda4608d4091", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bde2f6cc-fb26-4d71-95a6-57d1ae1c4afd", "external-id": "nsx-vlan-transportzone-206", "segmentation_id": 206, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tapd0aa15bf-fe", "ovs_interfaceid": "d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1738.043165] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1738.043165] env[62619]: value = "task-1778167" [ 1738.043165] env[62619]: _type = "Task" [ 1738.043165] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.052277] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778167, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.187763] env[62619]: DEBUG oslo_vmware.api [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1778165, 'name': ReconfigVM_Task, 'duration_secs': 0.448527} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.188553] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Reconfigured VM instance instance-00000039 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1738.193476] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edbfb152-8e13-4725-b4c8-2f982bf2373f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.209489] env[62619]: DEBUG oslo_vmware.api [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Waiting for the task: (returnval){ [ 1738.209489] env[62619]: value = "task-1778168" [ 1738.209489] env[62619]: _type = "Task" [ 1738.209489] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.218274] env[62619]: DEBUG oslo_vmware.api [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1778168, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.277382] env[62619]: DEBUG nova.compute.manager [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1738.292608] env[62619]: DEBUG oslo_concurrency.lockutils [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.039s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.294727] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 13.826s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.295121] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.295411] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1738.295585] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.929s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.295785] env[62619]: DEBUG nova.objects.instance [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lazy-loading 'pci_requests' on Instance uuid 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1738.299642] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4c54c1-ea5d-423c-a4fe-1b43c0412fb5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.311382] env[62619]: DEBUG nova.virt.hardware [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1738.311382] env[62619]: DEBUG nova.virt.hardware [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1738.311382] env[62619]: DEBUG nova.virt.hardware [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1738.311382] env[62619]: DEBUG nova.virt.hardware [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1738.311623] env[62619]: DEBUG nova.virt.hardware [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1738.311623] env[62619]: DEBUG nova.virt.hardware [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1738.311845] env[62619]: DEBUG nova.virt.hardware [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1738.312043] env[62619]: DEBUG nova.virt.hardware [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1738.312228] env[62619]: DEBUG nova.virt.hardware [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1738.312414] env[62619]: DEBUG nova.virt.hardware [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] 
{{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1738.312616] env[62619]: DEBUG nova.virt.hardware [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1738.314060] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06853ddf-ca89-461f-b4f7-e3c900f998bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.319796] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02052bd-aff5-4c5c-9a66-194522212097 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.323243] env[62619]: INFO nova.scheduler.client.report [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleted allocations for instance 34180944-02f7-4115-8178-64f2f2591080 [ 1738.341514] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a799450-e930-499e-82c3-bc651b8d9622 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.345078] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff084f6-bd66-499d-897c-4211ccf061b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.363366] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bcf1675-6990-49bf-97f8-399d1a30cc30 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.400520] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179523MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1738.400729] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.415332] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778164, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.419072] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778166, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072651} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.419392] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1738.420233] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896d64d0-62a6-46e5-8ddc-1351b11b91c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.445156] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 32aed8cd-1583-4253-bfb6-a98610e2f32e/32aed8cd-1583-4253-bfb6-a98610e2f32e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1738.445489] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa6d2dce-a5c1-42a5-b136-e67e4d5a862b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.467126] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Waiting for the task: (returnval){ [ 1738.467126] env[62619]: value = "task-1778169" [ 1738.467126] env[62619]: _type = "Task" [ 1738.467126] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.480100] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778169, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.539063] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Releasing lock "refresh_cache-f3345332-5a22-4a1c-ac74-4e8f2ceb3f15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.542031] env[62619]: DEBUG nova.compute.manager [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Instance network_info: |[{"id": "d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09", "address": "fa:16:3e:e7:72:57", "network": {"id": "93cb677b-0f18-4676-a548-7174b284325e", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-819439326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c81be375fe34117838fcda4608d4091", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bde2f6cc-fb26-4d71-95a6-57d1ae1c4afd", "external-id": "nsx-vlan-transportzone-206", "segmentation_id": 206, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0aa15bf-fe", "ovs_interfaceid": "d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1738.542031] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:72:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bde2f6cc-fb26-4d71-95a6-57d1ae1c4afd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0aa15bf-fe9d-4cf6-8bb3-337b0a223b09', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1738.547922] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Creating folder: Project (1c81be375fe34117838fcda4608d4091). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1738.550015] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db1ce94e-6bf8-41b5-b244-5674229aac38 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.563020] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778167, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.563925] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Created folder: Project (1c81be375fe34117838fcda4608d4091) in parent group-v368875. [ 1738.564200] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Creating folder: Instances. Parent ref: group-v369122. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1738.564456] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b57d5ed-724a-4dc5-8071-a1b490144a30 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.577051] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Created folder: Instances in parent group-v369122. [ 1738.577051] env[62619]: DEBUG oslo.service.loopingcall [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1738.577232] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1738.577328] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d61dfec6-2986-42eb-8454-b4fa42384b31 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.602611] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1738.602611] env[62619]: value = "task-1778172" [ 1738.602611] env[62619]: _type = "Task" [ 1738.602611] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.611948] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778172, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.683426] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1738.685038] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef3bea4b-fde9-4e4c-bf86-2a9d0329effd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.692415] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1738.692415] env[62619]: value = "task-1778173" [ 1738.692415] env[62619]: _type = "Task" [ 1738.692415] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.697943] env[62619]: DEBUG nova.compute.manager [req-d6dc88ab-1da8-45b2-bc6c-2703bc0232b6 req-aedf447f-c638-47c6-8b48-45dd93b11737 service nova] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Received event network-changed-d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1738.698448] env[62619]: DEBUG nova.compute.manager [req-d6dc88ab-1da8-45b2-bc6c-2703bc0232b6 req-aedf447f-c638-47c6-8b48-45dd93b11737 service nova] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Refreshing instance network info cache due to event network-changed-d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1738.698816] env[62619]: DEBUG oslo_concurrency.lockutils [req-d6dc88ab-1da8-45b2-bc6c-2703bc0232b6 req-aedf447f-c638-47c6-8b48-45dd93b11737 service nova] Acquiring lock "refresh_cache-f3345332-5a22-4a1c-ac74-4e8f2ceb3f15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.698948] env[62619]: DEBUG oslo_concurrency.lockutils [req-d6dc88ab-1da8-45b2-bc6c-2703bc0232b6 req-aedf447f-c638-47c6-8b48-45dd93b11737 service nova] Acquired lock "refresh_cache-f3345332-5a22-4a1c-ac74-4e8f2ceb3f15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.699077] env[62619]: DEBUG nova.network.neutron [req-d6dc88ab-1da8-45b2-bc6c-2703bc0232b6 req-aedf447f-c638-47c6-8b48-45dd93b11737 service nova] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Refreshing network info cache for port d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1738.709505] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778173, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.723432] env[62619]: DEBUG oslo_vmware.api [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1778168, 'name': ReconfigVM_Task, 'duration_secs': 0.266855} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.723731] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369005', 'volume_id': 'da513efd-d6e1-4db5-90d2-52a5be9aa233', 'name': 'volume-da513efd-d6e1-4db5-90d2-52a5be9aa233', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b1c3c213-599d-4cab-8224-d87467d774c9', 'attached_at': '', 'detached_at': '', 'volume_id': 'da513efd-d6e1-4db5-90d2-52a5be9aa233', 'serial': 'da513efd-d6e1-4db5-90d2-52a5be9aa233'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1738.724189] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1738.725887] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc954b6-bc83-467f-9d2f-ec607a9d5a29 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.736921] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1738.737365] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d4c41ca-02f1-45fe-88be-89678640a898 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.795116] env[62619]: DEBUG nova.network.neutron [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Successfully updated port: 78e09ad3-db58-4374-9332-13a1881ffdfd {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1738.806804] env[62619]: DEBUG nova.objects.instance [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lazy-loading 'numa_topology' on Instance uuid 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1738.844059] env[62619]: DEBUG oslo_concurrency.lockutils [None req-203d3c57-af84-4c22-a43f-b92daf2523d3 tempest-DeleteServersTestJSON-378247299 
tempest-DeleteServersTestJSON-378247299-project-member] Lock "34180944-02f7-4115-8178-64f2f2591080" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 20.798s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.910984] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778164, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.978149] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.999358] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1739.000376] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1739.000683] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Deleting the datastore file [datastore1] b1c3c213-599d-4cab-8224-d87467d774c9 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1739.001308] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a7f4d83-6acb-4c9b-843b-7e92a589e2f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.009060] env[62619]: DEBUG oslo_vmware.api [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Waiting for the task: (returnval){ [ 1739.009060] env[62619]: value = "task-1778175" [ 1739.009060] env[62619]: _type = "Task" [ 1739.009060] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.018391] env[62619]: DEBUG oslo_vmware.api [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1778175, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.061958] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778167, 'name': ReconfigVM_Task, 'duration_secs': 0.742374} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.062433] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Reconfigured VM instance instance-0000004b to attach disk [datastore1] volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51/volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1739.067930] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f21e7d7b-ffc7-41b2-8fcb-bf44e579c2ca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.090275] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1739.090275] env[62619]: value = "task-1778176" [ 1739.090275] env[62619]: _type = "Task" [ 1739.090275] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.100094] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778176, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.113740] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778172, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.203838] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778173, 'name': PowerOffVM_Task, 'duration_secs': 0.303541} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.204130] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1739.205340] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1d8a7e-606f-4743-9d4d-fc9789fee252 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.225698] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57dc03da-95a2-4d55-a5db-d0a10643b7a6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.260131] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1739.260472] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f045f79-6a88-4738-bf56-e69d911bc190 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.267519] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1739.267519] env[62619]: value = "task-1778177" [ 1739.267519] env[62619]: _type = "Task" [ 1739.267519] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.280914] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1739.281097] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1739.281345] env[62619]: DEBUG oslo_concurrency.lockutils [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1739.281495] env[62619]: DEBUG oslo_concurrency.lockutils [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1739.281668] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1739.281913] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c3eb7a1-1961-4092-9dd8-a2d7eb482a16 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.290386] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1739.290590] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1739.291405] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d233baf0-1b8d-4971-b712-6093338ee795 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.296824] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1739.296824] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5224b58c-7dae-6681-af26-9571a0b0fe9e" [ 1739.296824] env[62619]: _type = "Task" [ 1739.296824] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.297460] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "refresh_cache-b84dd91a-8e08-4476-9683-655357d18370" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1739.297597] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquired lock "refresh_cache-b84dd91a-8e08-4476-9683-655357d18370" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1739.297747] env[62619]: DEBUG nova.network.neutron [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1739.308972] env[62619]: INFO nova.compute.claims [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1739.311532] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5224b58c-7dae-6681-af26-9571a0b0fe9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.411045] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778164, 'name': CloneVM_Task, 'duration_secs': 1.943228} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.411390] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Created linked-clone VM from snapshot [ 1739.412451] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b602e0-1b90-4233-b288-722290e03e35 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.420902] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Uploading image d1791572-abf0-49e9-9ccd-ae11e1d9d561 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1739.435720] env[62619]: DEBUG nova.network.neutron [req-d6dc88ab-1da8-45b2-bc6c-2703bc0232b6 req-aedf447f-c638-47c6-8b48-45dd93b11737 service nova] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Updated VIF entry in instance network info cache for port d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1739.436038] env[62619]: DEBUG nova.network.neutron [req-d6dc88ab-1da8-45b2-bc6c-2703bc0232b6 req-aedf447f-c638-47c6-8b48-45dd93b11737 service nova] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Updating instance_info_cache with network_info: [{"id": "d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09", "address": "fa:16:3e:e7:72:57", "network": {"id": "93cb677b-0f18-4676-a548-7174b284325e", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-819439326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c81be375fe34117838fcda4608d4091", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bde2f6cc-fb26-4d71-95a6-57d1ae1c4afd", "external-id": "nsx-vlan-transportzone-206", "segmentation_id": 206, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0aa15bf-fe", "ovs_interfaceid": "d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1739.457514] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1739.457514] env[62619]: value = "vm-369121" [ 1739.457514] env[62619]: _type = "VirtualMachine" [ 1739.457514] env[62619]: }. 
{{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1739.457787] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a826db48-713d-41c5-863c-03a11400ce18 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.465304] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lease: (returnval){ [ 1739.465304] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d6ed10-331b-d5ad-36f8-4119e64f2883" [ 1739.465304] env[62619]: _type = "HttpNfcLease" [ 1739.465304] env[62619]: } obtained for exporting VM: (result){ [ 1739.465304] env[62619]: value = "vm-369121" [ 1739.465304] env[62619]: _type = "VirtualMachine" [ 1739.465304] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1739.465702] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the lease: (returnval){ [ 1739.465702] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d6ed10-331b-d5ad-36f8-4119e64f2883" [ 1739.465702] env[62619]: _type = "HttpNfcLease" [ 1739.465702] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1739.475694] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1739.475694] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d6ed10-331b-d5ad-36f8-4119e64f2883" [ 1739.475694] env[62619]: _type = "HttpNfcLease" [ 1739.475694] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1739.478624] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778169, 'name': ReconfigVM_Task, 'duration_secs': 0.584688} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.478897] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 32aed8cd-1583-4253-bfb6-a98610e2f32e/32aed8cd-1583-4253-bfb6-a98610e2f32e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1739.479531] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3996956e-3c3f-4feb-a231-0ec2e141421b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.488085] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Waiting for the task: (returnval){ [ 1739.488085] env[62619]: value = "task-1778179" [ 1739.488085] env[62619]: _type = "Task" [ 1739.488085] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.495998] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778179, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.520855] env[62619]: DEBUG oslo_vmware.api [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Task: {'id': task-1778175, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.121348} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.521141] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1739.521333] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1739.521516] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1739.522043] env[62619]: INFO nova.compute.manager [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Took 2.46 seconds to destroy the instance on the hypervisor. [ 1739.522431] env[62619]: DEBUG oslo.service.loopingcall [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1739.522648] env[62619]: DEBUG nova.compute.manager [-] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1739.522740] env[62619]: DEBUG nova.network.neutron [-] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1739.601085] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778176, 'name': ReconfigVM_Task, 'duration_secs': 0.237252} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.602185] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369108', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'name': 'volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '11869077-b428-413f-9f8f-7eac08d2d9ec', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'serial': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1739.602185] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24fe7286-b47b-4d05-ac67-31ef0e615f27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.614391] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778172, 'name': CreateVM_Task, 'duration_secs': 0.645649} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.615688] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1739.616027] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1739.616027] env[62619]: value = "task-1778180" [ 1739.616027] env[62619]: _type = "Task" [ 1739.616027] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.616684] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1739.616847] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1739.617276] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1739.617728] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b006175-cacb-4819-91a8-be18f2edd79f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.626904] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Waiting for the task: (returnval){ [ 1739.626904] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b484fb-fb61-cfc3-6417-a7f73f715e6a" [ 1739.626904] env[62619]: _type = "Task" [ 1739.626904] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.630329] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778180, 'name': Rename_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.639855] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b484fb-fb61-cfc3-6417-a7f73f715e6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.819220] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5224b58c-7dae-6681-af26-9571a0b0fe9e, 'name': SearchDatastore_Task, 'duration_secs': 0.009426} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.822735] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baa8ffae-12c9-4ce5-802d-c0cd15cbfff0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.829999] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1739.829999] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528eea0b-6d86-47aa-3bb2-fb6322502b4a" [ 1739.829999] env[62619]: _type = "Task" [ 1739.829999] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.841851] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528eea0b-6d86-47aa-3bb2-fb6322502b4a, 'name': SearchDatastore_Task, 'duration_secs': 0.009667} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.842141] env[62619]: DEBUG oslo_concurrency.lockutils [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.842415] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e302e431-1f95-4ab5-bfca-59450fd887f0/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk. {{(pid=62619) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1739.842674] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2fbbb6c-1548-45ed-9cd6-5788a3a89887 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.849400] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1739.849400] env[62619]: value = "task-1778181" [ 1739.849400] env[62619]: _type = "Task" [ 1739.849400] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.863434] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778181, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.864657] env[62619]: DEBUG nova.network.neutron [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1739.939881] env[62619]: DEBUG oslo_concurrency.lockutils [req-d6dc88ab-1da8-45b2-bc6c-2703bc0232b6 req-aedf447f-c638-47c6-8b48-45dd93b11737 service nova] Releasing lock "refresh_cache-f3345332-5a22-4a1c-ac74-4e8f2ceb3f15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.940214] env[62619]: DEBUG nova.compute.manager [req-d6dc88ab-1da8-45b2-bc6c-2703bc0232b6 req-aedf447f-c638-47c6-8b48-45dd93b11737 service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Received event network-vif-plugged-78e09ad3-db58-4374-9332-13a1881ffdfd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1739.940555] env[62619]: DEBUG oslo_concurrency.lockutils [req-d6dc88ab-1da8-45b2-bc6c-2703bc0232b6 req-aedf447f-c638-47c6-8b48-45dd93b11737 service nova] Acquiring lock "b84dd91a-8e08-4476-9683-655357d18370-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.940753] env[62619]: DEBUG oslo_concurrency.lockutils [req-d6dc88ab-1da8-45b2-bc6c-2703bc0232b6 req-aedf447f-c638-47c6-8b48-45dd93b11737 service nova] Lock "b84dd91a-8e08-4476-9683-655357d18370-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.940921] env[62619]: DEBUG oslo_concurrency.lockutils [req-d6dc88ab-1da8-45b2-bc6c-2703bc0232b6 req-aedf447f-c638-47c6-8b48-45dd93b11737 service nova] Lock "b84dd91a-8e08-4476-9683-655357d18370-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.941528] env[62619]: DEBUG nova.compute.manager [req-d6dc88ab-1da8-45b2-bc6c-2703bc0232b6 req-aedf447f-c638-47c6-8b48-45dd93b11737 service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] No waiting events found dispatching network-vif-plugged-78e09ad3-db58-4374-9332-13a1881ffdfd {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1739.941808] env[62619]: WARNING nova.compute.manager [req-d6dc88ab-1da8-45b2-bc6c-2703bc0232b6 req-aedf447f-c638-47c6-8b48-45dd93b11737 service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Received unexpected event network-vif-plugged-78e09ad3-db58-4374-9332-13a1881ffdfd for instance with vm_state building and task_state spawning. [ 1739.978541] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1739.978541] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d6ed10-331b-d5ad-36f8-4119e64f2883" [ 1739.978541] env[62619]: _type = "HttpNfcLease" [ 1739.978541] env[62619]: } is ready. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1739.979571] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1739.979571] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d6ed10-331b-d5ad-36f8-4119e64f2883" [ 1739.979571] env[62619]: _type = "HttpNfcLease" [ 1739.979571] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1739.980716] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f0fe4f-19c8-44bc-b174-21938831499a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.992150] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a3b81a-e957-ce6d-8512-fbd421fd9844/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1739.992445] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a3b81a-e957-ce6d-8512-fbd421fd9844/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1740.081998] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778179, 'name': Rename_Task, 'duration_secs': 0.405259} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.083865] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1740.084737] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7e9c13a3-738a-49a6-8a9f-40c1083f287f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.091731] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Waiting for the task: (returnval){ [ 1740.091731] env[62619]: value = "task-1778182" [ 1740.091731] env[62619]: _type = "Task" [ 1740.091731] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.100894] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778182, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.127652] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778180, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.143656] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-01491e96-dae4-40ad-9eb2-a968174661c0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.146626] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b484fb-fb61-cfc3-6417-a7f73f715e6a, 'name': SearchDatastore_Task, 'duration_secs': 0.031624} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.147261] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.147531] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1740.147805] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.147956] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.149338] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1740.149338] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95591538-a89c-4933-be52-bf63467d3a26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.159151] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1740.159397] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1740.160309] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75f8b430-70ac-47ef-abc3-e6dc6b337e44 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.167900] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Waiting for the task: (returnval){ [ 1740.167900] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b3bebe-b924-a345-f1ee-c6e56f01f7c8" [ 1740.167900] env[62619]: _type = "Task" [ 1740.167900] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.180301] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b3bebe-b924-a345-f1ee-c6e56f01f7c8, 'name': SearchDatastore_Task, 'duration_secs': 0.009027} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.181196] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44f2f8f2-4ad8-4ec3-a551-7fddd7888c05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.194121] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Waiting for the task: (returnval){ [ 1740.194121] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5293f2cd-2f89-1b60-e4b1-db0674a707c8" [ 1740.194121] env[62619]: _type = "Task" [ 1740.194121] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.202770] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5293f2cd-2f89-1b60-e4b1-db0674a707c8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.288547] env[62619]: DEBUG nova.network.neutron [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Updating instance_info_cache with network_info: [{"id": "78e09ad3-db58-4374-9332-13a1881ffdfd", "address": "fa:16:3e:06:ad:e4", "network": {"id": "014f0330-ae80-41d1-8155-a6a0fbf47197", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1852196471-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "079ddd8f5dc14fa699b4961995733f95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78e09ad3-db", "ovs_interfaceid": "78e09ad3-db58-4374-9332-13a1881ffdfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1740.370068] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778181, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.599688] env[62619]: DEBUG nova.network.neutron [-] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1740.615943] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778182, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.639532] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778180, 'name': Rename_Task, 'duration_secs': 0.811748} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.639744] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1740.640216] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d2f68da-6e1e-4b85-bc3b-8dcba64ac53f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.654469] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1740.654469] env[62619]: value = "task-1778183" [ 1740.654469] env[62619]: _type = "Task" [ 1740.654469] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.669626] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778183, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.720117] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5293f2cd-2f89-1b60-e4b1-db0674a707c8, 'name': SearchDatastore_Task, 'duration_secs': 0.009239} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.722137] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.722137] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] f3345332-5a22-4a1c-ac74-4e8f2ceb3f15/f3345332-5a22-4a1c-ac74-4e8f2ceb3f15.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1740.722137] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4fff0da-1687-4ef2-85e0-26790d5ed6c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.734423] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Waiting for the task: (returnval){ [ 1740.734423] env[62619]: value = "task-1778184" [ 1740.734423] env[62619]: _type = "Task" [ 1740.734423] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.746274] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778184, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.749952] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978c8996-736b-4c0a-9fea-a47b0e18ea21 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.759970] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32eb324-cf8b-4470-9c46-da1bf5290144 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.800993] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Releasing lock "refresh_cache-b84dd91a-8e08-4476-9683-655357d18370" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.801657] env[62619]: DEBUG nova.compute.manager [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Instance network_info: |[{"id": "78e09ad3-db58-4374-9332-13a1881ffdfd", "address": "fa:16:3e:06:ad:e4", "network": {"id": "014f0330-ae80-41d1-8155-a6a0fbf47197", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1852196471-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "079ddd8f5dc14fa699b4961995733f95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78e09ad3-db", "ovs_interfaceid": "78e09ad3-db58-4374-9332-13a1881ffdfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1740.803320] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:ad:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '678ebbe4-4c53-4eaf-a689-93981310f37d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78e09ad3-db58-4374-9332-13a1881ffdfd', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1740.817291] env[62619]: DEBUG oslo.service.loopingcall [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to 
return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1740.817408] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb898e16-5476-42cb-994e-11694a6bc055 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.827056] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b84dd91a-8e08-4476-9683-655357d18370] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1740.827056] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3895e532-4996-4576-b3a9-33f20e12b6c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.849849] env[62619]: DEBUG nova.compute.manager [req-0464c78e-2401-496b-8285-fc9c0e63afb2 req-44230cb4-2a5b-4976-bb2f-42e2c4943617 service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Received event network-changed-78e09ad3-db58-4374-9332-13a1881ffdfd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1740.850898] env[62619]: DEBUG nova.compute.manager [req-0464c78e-2401-496b-8285-fc9c0e63afb2 req-44230cb4-2a5b-4976-bb2f-42e2c4943617 service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Refreshing instance network info cache due to event network-changed-78e09ad3-db58-4374-9332-13a1881ffdfd. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1740.850898] env[62619]: DEBUG oslo_concurrency.lockutils [req-0464c78e-2401-496b-8285-fc9c0e63afb2 req-44230cb4-2a5b-4976-bb2f-42e2c4943617 service nova] Acquiring lock "refresh_cache-b84dd91a-8e08-4476-9683-655357d18370" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.850898] env[62619]: DEBUG oslo_concurrency.lockutils [req-0464c78e-2401-496b-8285-fc9c0e63afb2 req-44230cb4-2a5b-4976-bb2f-42e2c4943617 service nova] Acquired lock "refresh_cache-b84dd91a-8e08-4476-9683-655357d18370" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.850898] env[62619]: DEBUG nova.network.neutron [req-0464c78e-2401-496b-8285-fc9c0e63afb2 req-44230cb4-2a5b-4976-bb2f-42e2c4943617 service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Refreshing network info cache for port 78e09ad3-db58-4374-9332-13a1881ffdfd {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1740.868740] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1978b624-c3cc-4c80-ba09-c62f73612442 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.882887] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1740.882887] env[62619]: value = "task-1778185" [ 1740.882887] env[62619]: _type = "Task" [ 1740.882887] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.904632] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778181, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.837866} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.905140] env[62619]: DEBUG nova.compute.provider_tree [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1740.906692] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e302e431-1f95-4ab5-bfca-59450fd887f0/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk. [ 1740.908305] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8129cb0e-36da-4d0a-ab50-282f086d3f26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.916193] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778185, 'name': CreateVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.949133] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] e302e431-1f95-4ab5-bfca-59450fd887f0/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1740.949940] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-263a1646-25f3-479e-94c1-0c1705463053 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.976929] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1740.976929] env[62619]: value = "task-1778186" [ 1740.976929] env[62619]: _type = "Task" [ 1740.976929] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.989279] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778186, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.104039] env[62619]: DEBUG oslo_vmware.api [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778182, 'name': PowerOnVM_Task, 'duration_secs': 0.718273} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.104459] env[62619]: INFO nova.compute.manager [-] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Took 1.58 seconds to deallocate network for instance. [ 1741.104774] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1741.105047] env[62619]: INFO nova.compute.manager [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Took 9.76 seconds to spawn the instance on the hypervisor. [ 1741.105255] env[62619]: DEBUG nova.compute.manager [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1741.108274] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58515d51-2c18-470d-bd6f-b0f404bf3c97 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.169176] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778183, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.251934] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778184, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.326371] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Acquiring lock "7ee5f09f-e27b-4373-88ce-8cff2f55a2b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.326577] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Lock "7ee5f09f-e27b-4373-88ce-8cff2f55a2b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.326853] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Acquiring lock "7ee5f09f-e27b-4373-88ce-8cff2f55a2b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.327090] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Lock "7ee5f09f-e27b-4373-88ce-8cff2f55a2b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.328234] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Lock "7ee5f09f-e27b-4373-88ce-8cff2f55a2b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.329640] env[62619]: INFO nova.compute.manager [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Terminating instance [ 1741.398641] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778185, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.416568] env[62619]: DEBUG nova.scheduler.client.report [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1741.495987] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778186, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.640716] env[62619]: INFO nova.compute.manager [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Took 33.67 seconds to build instance. [ 1741.670243] env[62619]: DEBUG oslo_vmware.api [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778183, 'name': PowerOnVM_Task, 'duration_secs': 0.893527} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.670947] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1741.671339] env[62619]: DEBUG nova.compute.manager [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1741.672332] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d24e360-131f-4b34-a4d4-b8730dc5e7f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.676924] env[62619]: INFO nova.compute.manager [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Took 0.57 seconds to detach 1 volumes for instance. 
[ 1741.679711] env[62619]: DEBUG nova.compute.manager [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Deleting volume: da513efd-d6e1-4db5-90d2-52a5be9aa233 {{(pid=62619) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1741.755562] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778184, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.583416} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.758992] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] f3345332-5a22-4a1c-ac74-4e8f2ceb3f15/f3345332-5a22-4a1c-ac74-4e8f2ceb3f15.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1741.759262] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1741.759839] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3794f2eb-42bd-4a76-b686-92f6d8b58263 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.774024] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Waiting for the task: (returnval){ [ 1741.774024] env[62619]: value = "task-1778187" [ 1741.774024] env[62619]: _type = "Task" [ 1741.774024] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.783921] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778187, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.835844] env[62619]: DEBUG nova.compute.manager [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1741.836229] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1741.837133] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f99e3a6c-90f5-43fb-a40e-003483094c44 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.847537] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1741.847793] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-371bf7b0-e7e1-4f78-9578-4df9ff14b440 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.855687] env[62619]: DEBUG oslo_vmware.api [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Waiting for the task: (returnval){ [ 1741.855687] env[62619]: value = "task-1778189" [ 1741.855687] env[62619]: _type = "Task" [ 1741.855687] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.860709] env[62619]: DEBUG nova.network.neutron [req-0464c78e-2401-496b-8285-fc9c0e63afb2 req-44230cb4-2a5b-4976-bb2f-42e2c4943617 service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Updated VIF entry in instance network info cache for port 78e09ad3-db58-4374-9332-13a1881ffdfd. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1741.861188] env[62619]: DEBUG nova.network.neutron [req-0464c78e-2401-496b-8285-fc9c0e63afb2 req-44230cb4-2a5b-4976-bb2f-42e2c4943617 service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Updating instance_info_cache with network_info: [{"id": "78e09ad3-db58-4374-9332-13a1881ffdfd", "address": "fa:16:3e:06:ad:e4", "network": {"id": "014f0330-ae80-41d1-8155-a6a0fbf47197", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1852196471-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "079ddd8f5dc14fa699b4961995733f95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78e09ad3-db", "ovs_interfaceid": "78e09ad3-db58-4374-9332-13a1881ffdfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1741.870559] env[62619]: DEBUG oslo_vmware.api [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1778189, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.897651] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778185, 'name': CreateVM_Task, 'duration_secs': 0.702588} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.897826] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b84dd91a-8e08-4476-9683-655357d18370] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1741.898596] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1741.898875] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1741.899396] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1741.899720] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a65a0dd-df55-45b1-a9aa-c2808b9688b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.906616] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1741.906616] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d972e1-553d-29ab-5647-c638689b9fb2" [ 1741.906616] env[62619]: _type = "Task" [ 1741.906616] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.918517] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d972e1-553d-29ab-5647-c638689b9fb2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.922537] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.627s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.924725] env[62619]: DEBUG oslo_concurrency.lockutils [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 12.793s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.977052] env[62619]: INFO nova.network.neutron [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Updating port a10c5399-b021-4ea7-8a41-4d58136aff12 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1741.991747] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778186, 'name': ReconfigVM_Task, 'duration_secs': 0.75868} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.992606] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Reconfigured VM instance instance-00000058 to attach disk [datastore1] e302e431-1f95-4ab5-bfca-59450fd887f0/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1741.993140] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6f9952-7a8f-4d82-90b1-0293132c2af5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.025382] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e773d736-0079-475e-a825-294dbdda119b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.045623] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1742.045623] env[62619]: value = "task-1778190" [ 1742.045623] env[62619]: _type = "Task" [ 1742.045623] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.058726] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778190, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.143603] env[62619]: DEBUG oslo_concurrency.lockutils [None req-971d0e79-7d06-4e0e-aee4-eb5fce532859 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Lock "32aed8cd-1583-4253-bfb6-a98610e2f32e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.764s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.203264] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.248605] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.280637] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778187, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071132} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.280805] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1742.281734] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d7df5c-69dc-4307-b612-dc25d3c0c4a1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.306594] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] f3345332-5a22-4a1c-ac74-4e8f2ceb3f15/f3345332-5a22-4a1c-ac74-4e8f2ceb3f15.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1742.307185] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68c6c9e1-07cc-43e8-bf1a-93b85c6bce25 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.332409] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Waiting for the task: (returnval){ [ 1742.332409] env[62619]: value = "task-1778191" [ 1742.332409] env[62619]: _type = "Task" [ 1742.332409] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.345128] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778191, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.367304] env[62619]: DEBUG oslo_vmware.api [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1778189, 'name': PowerOffVM_Task, 'duration_secs': 0.268558} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.367675] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1742.367864] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1742.368165] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d06e74f9-e48e-4420-9306-d72973217af4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.372875] env[62619]: DEBUG oslo_concurrency.lockutils [req-0464c78e-2401-496b-8285-fc9c0e63afb2 req-44230cb4-2a5b-4976-bb2f-42e2c4943617 service nova] Releasing lock "refresh_cache-b84dd91a-8e08-4476-9683-655357d18370" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1742.372875] env[62619]: DEBUG nova.compute.manager [req-0464c78e-2401-496b-8285-fc9c0e63afb2 req-44230cb4-2a5b-4976-bb2f-42e2c4943617 service nova] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Received event network-vif-deleted-77589fc3-af02-4235-859e-fbf8a8322155 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1742.419814] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d972e1-553d-29ab-5647-c638689b9fb2, 'name': SearchDatastore_Task, 'duration_secs': 0.018911} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.420222] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1742.420544] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1742.420816] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1742.421065] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1742.421369] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1742.421676] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-446384c0-bb75-4894-a678-a5c6313d5050 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.430850] env[62619]: INFO nova.compute.claims [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1742.438022] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1742.438022] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1742.438022] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4488d288-666d-4a3d-9673-421c1e9741db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.444673] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1742.444673] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5273c9a9-6f0b-85dc-2883-41a3cb1f4834" [ 1742.444673] env[62619]: _type = "Task" [ 1742.444673] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.457343] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5273c9a9-6f0b-85dc-2883-41a3cb1f4834, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.467608] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1742.467966] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1742.468360] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Deleting the datastore file [datastore1] 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1742.468755] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a917d0f4-bf49-4526-8702-2246ecd0918d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.477675] env[62619]: DEBUG oslo_vmware.api [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Waiting for the task: (returnval){ [ 1742.477675] env[62619]: value = "task-1778193" [ 1742.477675] env[62619]: _type = "Task" [ 1742.477675] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.488019] env[62619]: DEBUG oslo_vmware.api [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1778193, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.556855] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778190, 'name': ReconfigVM_Task, 'duration_secs': 0.306845} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.557258] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1742.557499] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3702a704-d6df-4a74-8831-9d5d8e505f71 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.567966] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1742.567966] env[62619]: value = "task-1778194" [ 1742.567966] env[62619]: _type = "Task" [ 1742.567966] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.579751] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778194, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.637413] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "88f79718-97d0-432c-8515-b60ab3dfd7e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.637671] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "88f79718-97d0-432c-8515-b60ab3dfd7e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.845222] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778191, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.940143] env[62619]: INFO nova.compute.resource_tracker [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating resource usage from migration 1b3a5f25-cf05-4d16-a7d4-796835ed89d7 [ 1742.961175] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5273c9a9-6f0b-85dc-2883-41a3cb1f4834, 'name': SearchDatastore_Task, 'duration_secs': 0.018419} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.962329] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d3d0d7d-0259-469b-89be-3cec02bfb1b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.969569] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1742.969569] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523dcb95-52e7-9673-3ff5-716963fe26e7" [ 1742.969569] env[62619]: _type = "Task" [ 1742.969569] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.986460] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523dcb95-52e7-9673-3ff5-716963fe26e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.992333] env[62619]: DEBUG oslo_vmware.api [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Task: {'id': task-1778193, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.357127} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.992624] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1742.992813] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1742.993025] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1742.993362] env[62619]: INFO nova.compute.manager [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1742.993579] env[62619]: DEBUG oslo.service.loopingcall [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1742.996533] env[62619]: DEBUG nova.compute.manager [-] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1742.996672] env[62619]: DEBUG nova.network.neutron [-] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1743.081072] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778194, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.141361] env[62619]: DEBUG nova.compute.manager [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1743.311766] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f909ff7c-337f-4995-b891-6ac28f16be6c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.320829] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79fbaa26-4e96-44b8-802e-84ddfd8a0da7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.367118] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8cc099-25a5-42c7-b3a7-70f3c907897b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.379636] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b7ced9-123a-412c-a096-62faaa3a6965 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.383685] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778191, 'name': ReconfigVM_Task, 'duration_secs': 0.606083} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.383974] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Reconfigured VM instance instance-0000005a to attach disk [datastore1] f3345332-5a22-4a1c-ac74-4e8f2ceb3f15/f3345332-5a22-4a1c-ac74-4e8f2ceb3f15.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1743.385421] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0b51cc9-dabd-4e01-acb1-0ad80fba8974 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.397922] env[62619]: DEBUG nova.compute.provider_tree [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1743.402768] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Waiting for the task: (returnval){ [ 1743.402768] env[62619]: value = "task-1778195" [ 1743.402768] env[62619]: _type = "Task" [ 1743.402768] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.410258] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778195, 'name': Rename_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.483136] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523dcb95-52e7-9673-3ff5-716963fe26e7, 'name': SearchDatastore_Task, 'duration_secs': 0.01742} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.483695] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1743.484021] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] b84dd91a-8e08-4476-9683-655357d18370/b84dd91a-8e08-4476-9683-655357d18370.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1743.484622] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-683943e6-558a-4368-b084-2105532933f2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.494325] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1743.494325] env[62619]: value = "task-1778196" [ 1743.494325] env[62619]: _type = "Task" [ 1743.494325] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.504633] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778196, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.558712] env[62619]: DEBUG nova.compute.manager [req-afa604b5-cd70-4d05-b61c-5aae6e799bfb req-12357fbf-582e-4686-9be2-b94f3e6360d9 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Received event network-changed-2924458a-bf48-482f-ab31-ad34e83a94d4 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1743.558935] env[62619]: DEBUG nova.compute.manager [req-afa604b5-cd70-4d05-b61c-5aae6e799bfb req-12357fbf-582e-4686-9be2-b94f3e6360d9 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Refreshing instance network info cache due to event network-changed-2924458a-bf48-482f-ab31-ad34e83a94d4. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1743.559558] env[62619]: DEBUG oslo_concurrency.lockutils [req-afa604b5-cd70-4d05-b61c-5aae6e799bfb req-12357fbf-582e-4686-9be2-b94f3e6360d9 service nova] Acquiring lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1743.559794] env[62619]: DEBUG oslo_concurrency.lockutils [req-afa604b5-cd70-4d05-b61c-5aae6e799bfb req-12357fbf-582e-4686-9be2-b94f3e6360d9 service nova] Acquired lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.560024] env[62619]: DEBUG nova.network.neutron [req-afa604b5-cd70-4d05-b61c-5aae6e799bfb req-12357fbf-582e-4686-9be2-b94f3e6360d9 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Refreshing network info cache for port 2924458a-bf48-482f-ab31-ad34e83a94d4 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1743.582749] env[62619]: DEBUG oslo_vmware.api [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778194, 'name': PowerOnVM_Task, 'duration_secs': 0.67102} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.582941] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1743.585798] env[62619]: DEBUG nova.compute.manager [None req-770d8fa5-d9a7-4326-ac80-715af04a3b56 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1743.587054] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ceeb46f-4900-4525-bff5-4bcdf6b6e8dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.672157] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.902456] env[62619]: DEBUG nova.scheduler.client.report [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1743.923104] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778195, 'name': Rename_Task, 'duration_secs': 0.222041} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.928023] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1743.928023] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37e61bcb-865e-4774-a4c6-d8eaafe94f5c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.934954] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Waiting for the task: (returnval){ [ 1743.934954] env[62619]: value = "task-1778197" [ 1743.934954] env[62619]: _type = "Task" [ 1743.934954] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.945787] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778197, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.006955] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778196, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.190970] env[62619]: DEBUG nova.compute.manager [req-a4c6a375-350f-441a-b08f-1337d671ac94 req-ab333591-22f7-416b-a952-4cc94943e295 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Received event network-vif-deleted-4d9eadaf-f867-4642-bf56-7866858ac8b0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1744.191220] env[62619]: INFO nova.compute.manager [req-a4c6a375-350f-441a-b08f-1337d671ac94 req-ab333591-22f7-416b-a952-4cc94943e295 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Neutron deleted interface 4d9eadaf-f867-4642-bf56-7866858ac8b0; detaching it from the instance and deleting it from the info cache [ 1744.191493] env[62619]: DEBUG nova.network.neutron [req-a4c6a375-350f-441a-b08f-1337d671ac94 req-ab333591-22f7-416b-a952-4cc94943e295 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.317859] env[62619]: DEBUG nova.network.neutron [req-afa604b5-cd70-4d05-b61c-5aae6e799bfb req-12357fbf-582e-4686-9be2-b94f3e6360d9 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Updated VIF entry in instance network info cache for port 2924458a-bf48-482f-ab31-ad34e83a94d4. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1744.318049] env[62619]: DEBUG nova.network.neutron [req-afa604b5-cd70-4d05-b61c-5aae6e799bfb req-12357fbf-582e-4686-9be2-b94f3e6360d9 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Updating instance_info_cache with network_info: [{"id": "2924458a-bf48-482f-ab31-ad34e83a94d4", "address": "fa:16:3e:c5:5c:af", "network": {"id": "c853257c-5523-4c7c-ac39-b96dd377e1fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1460025320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767e455ac6ef43d1b587e3e953ed8a9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2924458a-bf", "ovs_interfaceid": "2924458a-bf48-482f-ab31-ad34e83a94d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.415272] env[62619]: DEBUG oslo_concurrency.lockutils [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.490s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.415507] env[62619]: INFO nova.compute.manager [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Migrating [ 1744.427602] env[62619]: DEBUG oslo_concurrency.lockutils [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.632s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.427892] env[62619]: DEBUG nova.objects.instance [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lazy-loading 'resources' on Instance uuid a3101076-36d6-409a-8072-638107e63073 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1744.429753] env[62619]: DEBUG nova.network.neutron [-] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.457110] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778197, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.469032] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.469224] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquired lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.469410] env[62619]: DEBUG nova.network.neutron [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1744.510465] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778196, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.681096} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.511598] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] b84dd91a-8e08-4476-9683-655357d18370/b84dd91a-8e08-4476-9683-655357d18370.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1744.511598] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1744.513709] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1a02262-8a95-4fe5-ae7b-fccbd6099c4c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.522578] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1744.522578] env[62619]: value = "task-1778198" [ 1744.522578] env[62619]: _type = "Task" [ 1744.522578] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.534445] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778198, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.698123] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8c7c0e7a-b0c7-4fbc-9b40-8bd608bafbed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.708197] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54412c0-a775-42f0-b3a2-c5701426cdd9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.750571] env[62619]: DEBUG nova.compute.manager [req-a4c6a375-350f-441a-b08f-1337d671ac94 req-ab333591-22f7-416b-a952-4cc94943e295 service nova] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Detach interface failed, port_id=4d9eadaf-f867-4642-bf56-7866858ac8b0, reason: Instance 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1744.821751] env[62619]: DEBUG oslo_concurrency.lockutils [req-afa604b5-cd70-4d05-b61c-5aae6e799bfb req-12357fbf-582e-4686-9be2-b94f3e6360d9 service nova] Releasing lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1744.942682] env[62619]: INFO nova.compute.manager [-] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Took 1.94 seconds to deallocate network for instance. [ 1744.943946] env[62619]: DEBUG oslo_concurrency.lockutils [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "refresh_cache-7cb51b51-514d-4223-a82a-5cdbdab9482a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.944083] env[62619]: DEBUG oslo_concurrency.lockutils [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "refresh_cache-7cb51b51-514d-4223-a82a-5cdbdab9482a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.947546] env[62619]: DEBUG nova.network.neutron [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1744.964513] env[62619]: DEBUG oslo_vmware.api [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778197, 'name': PowerOnVM_Task, 'duration_secs': 0.693986} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.966044] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1744.966044] env[62619]: INFO nova.compute.manager [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Took 9.18 seconds to spawn the instance on the hypervisor. 
[ 1744.966236] env[62619]: DEBUG nova.compute.manager [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1744.967081] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4253aa9c-05a7-4374-baaa-87521fa8ab74 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.035788] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778198, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079727} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.035788] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1745.036600] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a692b539-46c9-412c-a29a-3992ab2cdb5e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.066387] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] b84dd91a-8e08-4476-9683-655357d18370/b84dd91a-8e08-4476-9683-655357d18370.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1745.069969] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-763ea30f-6110-456f-b070-0c338f0ea5d7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.097686] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1745.097686] env[62619]: value = "task-1778199" [ 1745.097686] env[62619]: _type = "Task" [ 1745.097686] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.109858] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778199, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.163549] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Acquiring lock "e19650da-cc3d-4350-be3e-dc776ce68206" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.163549] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Lock "e19650da-cc3d-4350-be3e-dc776ce68206" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1745.325996] env[62619]: DEBUG nova.network.neutron [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Updating instance_info_cache with network_info: [{"id": "a10c5399-b021-4ea7-8a41-4d58136aff12", "address": "fa:16:3e:14:4c:3d", "network": {"id": "7e652693-afc0-4c29-9a04-cad63acf109b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-107922130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf557954d79d4bb1939f6e65d4ed00b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa10c5399-b0", "ovs_interfaceid": "a10c5399-b021-4ea7-8a41-4d58136aff12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1745.353388] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d38fd1-22be-4d55-8e92-884997d8ef5e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.361925] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94cab109-1724-4eba-a027-e7ccc2641f78 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.399873] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6894305-9031-4b3c-8075-31a882f55068 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.410739] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d0dc20cc-a86b-4dc2-8382-8ece1963c320 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.427025] env[62619]: DEBUG nova.compute.provider_tree [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1745.461918] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.493559] env[62619]: INFO nova.compute.manager [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Took 28.90 seconds to build instance. [ 1745.606941] env[62619]: DEBUG nova.compute.manager [req-d64e31e4-dc29-418d-82a6-1f73d88731a8 req-b5c304d2-1599-4155-9bbd-65ba13361f5c service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Received event network-vif-plugged-a10c5399-b021-4ea7-8a41-4d58136aff12 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1745.606941] env[62619]: DEBUG oslo_concurrency.lockutils [req-d64e31e4-dc29-418d-82a6-1f73d88731a8 req-b5c304d2-1599-4155-9bbd-65ba13361f5c service nova] Acquiring lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.607240] env[62619]: DEBUG oslo_concurrency.lockutils [req-d64e31e4-dc29-418d-82a6-1f73d88731a8 req-b5c304d2-1599-4155-9bbd-65ba13361f5c service nova] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1745.607314] env[62619]: DEBUG oslo_concurrency.lockutils [req-d64e31e4-dc29-418d-82a6-1f73d88731a8 req-b5c304d2-1599-4155-9bbd-65ba13361f5c service nova] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.607450] env[62619]: DEBUG nova.compute.manager [req-d64e31e4-dc29-418d-82a6-1f73d88731a8 req-b5c304d2-1599-4155-9bbd-65ba13361f5c service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] No waiting events found dispatching network-vif-plugged-a10c5399-b021-4ea7-8a41-4d58136aff12 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1745.607768] env[62619]: WARNING nova.compute.manager [req-d64e31e4-dc29-418d-82a6-1f73d88731a8 req-b5c304d2-1599-4155-9bbd-65ba13361f5c service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Received unexpected event network-vif-plugged-a10c5399-b021-4ea7-8a41-4d58136aff12 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1745.607768] env[62619]: DEBUG nova.compute.manager [req-d64e31e4-dc29-418d-82a6-1f73d88731a8 req-b5c304d2-1599-4155-9bbd-65ba13361f5c service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Received event network-changed-a10c5399-b021-4ea7-8a41-4d58136aff12 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1745.607952] env[62619]: DEBUG nova.compute.manager [req-d64e31e4-dc29-418d-82a6-1f73d88731a8 req-b5c304d2-1599-4155-9bbd-65ba13361f5c service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Refreshing instance network info cache due to event network-changed-a10c5399-b021-4ea7-8a41-4d58136aff12. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1745.608203] env[62619]: DEBUG oslo_concurrency.lockutils [req-d64e31e4-dc29-418d-82a6-1f73d88731a8 req-b5c304d2-1599-4155-9bbd-65ba13361f5c service nova] Acquiring lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1745.613514] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778199, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.668655] env[62619]: DEBUG nova.compute.manager [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1745.837213] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Releasing lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.841067] env[62619]: DEBUG oslo_concurrency.lockutils [req-d64e31e4-dc29-418d-82a6-1f73d88731a8 req-b5c304d2-1599-4155-9bbd-65ba13361f5c service nova] Acquired lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1745.841067] env[62619]: DEBUG nova.network.neutron [req-d64e31e4-dc29-418d-82a6-1f73d88731a8 req-b5c304d2-1599-4155-9bbd-65ba13361f5c service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Refreshing network info cache for port a10c5399-b021-4ea7-8a41-4d58136aff12 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1745.876392] env[62619]: DEBUG nova.virt.hardware [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='189338d0d6befdd86722d24621e23667',container_format='bare',created_at=2024-12-11T22:54:24Z,direct_url=,disk_format='vmdk',id=034267b5-5870-4201-8726-91111429c131,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1528470308-shelved',owner='bf557954d79d4bb1939f6e65d4ed00b5',properties=ImageMetaProps,protected=,size=31590912,status='active',tags=,updated_at=2024-12-11T22:54:41Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1745.876732] env[62619]: DEBUG nova.virt.hardware [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1745.877147] env[62619]: DEBUG nova.virt.hardware [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1745.877384] env[62619]: DEBUG nova.virt.hardware [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1745.877724] env[62619]: DEBUG nova.virt.hardware [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1745.877889] env[62619]: DEBUG nova.virt.hardware [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1745.878175] env[62619]: DEBUG nova.virt.hardware [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1745.878359] env[62619]: DEBUG nova.virt.hardware [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1745.878548] env[62619]: DEBUG nova.virt.hardware [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1745.878765] env[62619]: DEBUG nova.virt.hardware [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:579}} [ 1745.879020] env[62619]: DEBUG nova.virt.hardware [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1745.880944] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf154020-71c2-4627-b367-8675d69535f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.893473] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e581591a-56a2-460f-939e-47f103b046e2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.915752] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:4c:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a10c88d7-d13f-44fd-acee-7a734eb5f56a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a10c5399-b021-4ea7-8a41-4d58136aff12', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1745.925174] env[62619]: DEBUG oslo.service.loopingcall [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1745.925315] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1745.925591] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a9d6aa8-433c-4a20-aca2-e85838609b34 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.943052] env[62619]: DEBUG nova.scheduler.client.report [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1745.955725] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1745.955725] env[62619]: value = "task-1778200" [ 1745.955725] env[62619]: _type = "Task" [ 1745.955725] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.964751] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778200, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.995787] env[62619]: DEBUG oslo_concurrency.lockutils [None req-271d22f2-cee8-43bc-99e2-b12794ddad35 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Lock "f3345332-5a22-4a1c-ac74-4e8f2ceb3f15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.415s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.028518] env[62619]: DEBUG nova.network.neutron [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating instance_info_cache with network_info: [{"id": "9567749e-9f9f-4b05-a445-3099dd2cdff8", "address": "fa:16:3e:7e:df:71", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9567749e-9f", "ovs_interfaceid": "9567749e-9f9f-4b05-a445-3099dd2cdff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1746.110976] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778199, 'name': ReconfigVM_Task, 'duration_secs': 0.786459} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.112232] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Reconfigured VM instance instance-0000005b to attach disk [datastore1] b84dd91a-8e08-4476-9683-655357d18370/b84dd91a-8e08-4476-9683-655357d18370.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1746.112232] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-099ea385-2787-407d-b737-665d048709d8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.118452] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1746.118452] env[62619]: value = "task-1778201" [ 1746.118452] env[62619]: _type = "Task" [ 1746.118452] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.127785] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778201, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.194112] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.451603] env[62619]: DEBUG oslo_concurrency.lockutils [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.024s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.454891] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 8.054s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.469842] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778200, 'name': CreateVM_Task, 'duration_secs': 0.463604} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.470144] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1746.471096] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/034267b5-5870-4201-8726-91111429c131" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1746.471590] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquired lock "[datastore1] devstack-image-cache_base/034267b5-5870-4201-8726-91111429c131" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1746.472164] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/034267b5-5870-4201-8726-91111429c131" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1746.473508] env[62619]: INFO nova.scheduler.client.report [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleted allocations for instance a3101076-36d6-409a-8072-638107e63073 [ 1746.475723] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-659352f1-6260-4479-a96a-2729a71a0877 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.487539] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1746.487539] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52314a9c-4f05-3763-bd58-cb24be71d34d" [ 1746.487539] env[62619]: _type = "Task" [ 1746.487539] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.496803] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52314a9c-4f05-3763-bd58-cb24be71d34d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.530950] env[62619]: DEBUG oslo_concurrency.lockutils [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "refresh_cache-7cb51b51-514d-4223-a82a-5cdbdab9482a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1746.632328] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778201, 'name': Rename_Task, 'duration_secs': 0.244514} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.632615] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1746.632883] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9fdbeeb0-3766-47b2-81d1-4cbec82399ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.641603] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1746.641603] env[62619]: value = "task-1778202" [ 1746.641603] env[62619]: _type = "Task" [ 1746.641603] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.651533] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778202, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.725561] env[62619]: DEBUG nova.network.neutron [req-d64e31e4-dc29-418d-82a6-1f73d88731a8 req-b5c304d2-1599-4155-9bbd-65ba13361f5c service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Updated VIF entry in instance network info cache for port a10c5399-b021-4ea7-8a41-4d58136aff12. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1746.725561] env[62619]: DEBUG nova.network.neutron [req-d64e31e4-dc29-418d-82a6-1f73d88731a8 req-b5c304d2-1599-4155-9bbd-65ba13361f5c service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Updating instance_info_cache with network_info: [{"id": "a10c5399-b021-4ea7-8a41-4d58136aff12", "address": "fa:16:3e:14:4c:3d", "network": {"id": "7e652693-afc0-4c29-9a04-cad63acf109b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-107922130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf557954d79d4bb1939f6e65d4ed00b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa10c5399-b0", "ovs_interfaceid": "a10c5399-b021-4ea7-8a41-4d58136aff12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1746.876911] env[62619]: INFO nova.compute.manager [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Rescuing [ 1746.877763] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "refresh_cache-8745aa7f-9848-4320-94b5-08b7e3bccf80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1746.879123] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "refresh_cache-8745aa7f-9848-4320-94b5-08b7e3bccf80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1746.879123] env[62619]: DEBUG nova.network.neutron [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1746.987430] env[62619]: DEBUG oslo_concurrency.lockutils [None req-54706f70-c7a8-4ec6-befa-54677bbd265f tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "a3101076-36d6-409a-8072-638107e63073" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.706s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.001924] env[62619]: DEBUG 
oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Releasing lock "[datastore1] devstack-image-cache_base/034267b5-5870-4201-8726-91111429c131" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1747.002609] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Processing image 034267b5-5870-4201-8726-91111429c131 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1747.003348] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/034267b5-5870-4201-8726-91111429c131/034267b5-5870-4201-8726-91111429c131.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1747.003905] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquired lock "[datastore1] devstack-image-cache_base/034267b5-5870-4201-8726-91111429c131/034267b5-5870-4201-8726-91111429c131.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1747.004342] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1747.004769] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27145973-c360-4f01-bb77-f413fa74fc14 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.017311] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1747.018208] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1747.020017] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f11812f-d2e5-4eab-a5f8-6755aa4d6552 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.025577] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1747.025577] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d17a13-5b92-9a8b-399a-fd6b00daaddf" [ 1747.025577] env[62619]: _type = "Task" [ 1747.025577] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.036367] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d17a13-5b92-9a8b-399a-fd6b00daaddf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.152465] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778202, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.205838] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "e34a8173-952b-4ddc-90cf-3681387733fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.206227] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "e34a8173-952b-4ddc-90cf-3681387733fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.206535] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "e34a8173-952b-4ddc-90cf-3681387733fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.206774] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "e34a8173-952b-4ddc-90cf-3681387733fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.206956] env[62619]: DEBUG oslo_concurrency.lockutils 
[None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "e34a8173-952b-4ddc-90cf-3681387733fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.209436] env[62619]: INFO nova.compute.manager [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Terminating instance [ 1747.227557] env[62619]: DEBUG oslo_concurrency.lockutils [req-d64e31e4-dc29-418d-82a6-1f73d88731a8 req-b5c304d2-1599-4155-9bbd-65ba13361f5c service nova] Releasing lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1747.479456] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Applying migration context for instance 7cb51b51-514d-4223-a82a-5cdbdab9482a as it has an incoming, in-progress migration 1b3a5f25-cf05-4d16-a7d4-796835ed89d7. Migration status is migrating {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1747.481544] env[62619]: INFO nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating resource usage from migration 1b3a5f25-cf05-4d16-a7d4-796835ed89d7 [ 1747.508531] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.508706] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e32cb991-a018-4b55-8cdf-378e212c8434 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.508847] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance b1c3c213-599d-4cab-8224-d87467d774c9 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1747.508967] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 5cf7ca57-351f-48ab-8758-b30f50cd607f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.509112] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1747.509231] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 11869077-b428-413f-9f8f-7eac08d2d9ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.509346] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4983b333-debb-4a2b-b28d-b321f0d8d7d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.509485] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e34a8173-952b-4ddc-90cf-3681387733fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.509598] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4858096a-9683-4a7c-bbeb-4e6b2f5401cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.509717] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.509806] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 917960ca-3870-4e4e-aafe-3c6d77cf7c51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.509920] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 8745aa7f-9848-4320-94b5-08b7e3bccf80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.510035] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e302e431-1f95-4ab5-bfca-59450fd887f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.510151] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 32aed8cd-1583-4253-bfb6-a98610e2f32e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.510257] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.510365] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance f3345332-5a22-4a1c-ac74-4e8f2ceb3f15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.510467] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance b84dd91a-8e08-4476-9683-655357d18370 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.510572] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Migration 1b3a5f25-cf05-4d16-a7d4-796835ed89d7 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1747.510896] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 7cb51b51-514d-4223-a82a-5cdbdab9482a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.543368] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Preparing fetch location {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1747.543712] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Fetch image to [datastore1] OSTACK_IMG_40470587-01ec-4065-a7d8-15f7897d698f/OSTACK_IMG_40470587-01ec-4065-a7d8-15f7897d698f.vmdk {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1747.543824] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Downloading stream optimized image 034267b5-5870-4201-8726-91111429c131 to [datastore1] OSTACK_IMG_40470587-01ec-4065-a7d8-15f7897d698f/OSTACK_IMG_40470587-01ec-4065-a7d8-15f7897d698f.vmdk on the data store datastore1 as vApp {{(pid=62619) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1747.543994] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Downloading image file data 034267b5-5870-4201-8726-91111429c131 to the ESX as VM named 'OSTACK_IMG_40470587-01ec-4065-a7d8-15f7897d698f' {{(pid=62619) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1747.645039] env[62619]: DEBUG oslo_vmware.rw_handles [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1747.645039] env[62619]: value = "resgroup-9" [ 1747.645039] env[62619]: _type = "ResourcePool" [ 1747.645039] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1747.645494] env[62619]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-f65a3b9d-766a-4d7a-8c0b-242bc61f178c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.673572] env[62619]: DEBUG oslo_vmware.api [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778202, 'name': PowerOnVM_Task, 'duration_secs': 0.8505} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.675208] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1747.675495] env[62619]: INFO nova.compute.manager [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Took 9.40 seconds to spawn the instance on the hypervisor. [ 1747.675701] env[62619]: DEBUG nova.compute.manager [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1747.676274] env[62619]: DEBUG oslo_vmware.rw_handles [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lease: (returnval){ [ 1747.676274] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5258ec6a-52a3-782c-6bc1-e5d60c86443a" [ 1747.676274] env[62619]: _type = "HttpNfcLease" [ 1747.676274] env[62619]: } obtained for vApp import into resource pool (val){ [ 1747.676274] env[62619]: value = "resgroup-9" [ 1747.676274] env[62619]: _type = "ResourcePool" [ 1747.676274] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1747.676923] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the lease: (returnval){ [ 1747.676923] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5258ec6a-52a3-782c-6bc1-e5d60c86443a" [ 1747.676923] env[62619]: _type = "HttpNfcLease" [ 1747.676923] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1747.680180] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79fed2f2-0cca-431a-8a9c-8a5209815aaf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.693129] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1747.693129] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5258ec6a-52a3-782c-6bc1-e5d60c86443a" [ 1747.693129] env[62619]: _type = "HttpNfcLease" [ 1747.693129] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1747.714080] env[62619]: DEBUG nova.network.neutron [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Updating instance_info_cache with network_info: [{"id": "9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01", "address": "fa:16:3e:da:01:a7", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9abe90c7-65", "ovs_interfaceid": "9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1747.716925] env[62619]: DEBUG nova.compute.manager [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1747.720120] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1747.720120] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebf3f72-ff21-4b41-a352-ceffd24803da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.733621] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1747.734077] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3de1d08f-00db-454b-a339-084c3a7bda7b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.743245] env[62619]: DEBUG oslo_vmware.api [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1747.743245] env[62619]: value = "task-1778204" [ 1747.743245] env[62619]: _type = "Task" [ 1747.743245] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.756047] env[62619]: DEBUG nova.compute.manager [req-47eb33d6-f996-4efe-bdcf-a7a954767add req-af0ae5b6-17ec-4980-bab9-e1319b34dec7 service nova] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Received event network-changed-d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1747.756411] env[62619]: DEBUG nova.compute.manager [req-47eb33d6-f996-4efe-bdcf-a7a954767add req-af0ae5b6-17ec-4980-bab9-e1319b34dec7 service nova] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Refreshing instance network info cache due to event network-changed-d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1747.756766] env[62619]: DEBUG oslo_concurrency.lockutils [req-47eb33d6-f996-4efe-bdcf-a7a954767add req-af0ae5b6-17ec-4980-bab9-e1319b34dec7 service nova] Acquiring lock "refresh_cache-f3345332-5a22-4a1c-ac74-4e8f2ceb3f15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1747.757832] env[62619]: DEBUG oslo_concurrency.lockutils [req-47eb33d6-f996-4efe-bdcf-a7a954767add req-af0ae5b6-17ec-4980-bab9-e1319b34dec7 service nova] Acquired lock "refresh_cache-f3345332-5a22-4a1c-ac74-4e8f2ceb3f15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1747.757832] env[62619]: DEBUG nova.network.neutron [req-47eb33d6-f996-4efe-bdcf-a7a954767add req-af0ae5b6-17ec-4980-bab9-e1319b34dec7 service nova] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Refreshing network info cache for port d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1747.765160] env[62619]: DEBUG oslo_vmware.api [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778204, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.014823] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 88f79718-97d0-432c-8515-b60ab3dfd7e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1748.055227] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78bcca46-55c0-41bd-b344-7a52a1c153f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.077313] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating instance '7cb51b51-514d-4223-a82a-5cdbdab9482a' progress to 0 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1748.189788] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1748.189788] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5258ec6a-52a3-782c-6bc1-e5d60c86443a" [ 1748.189788] env[62619]: _type = "HttpNfcLease" [ 1748.189788] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1748.202876] env[62619]: INFO nova.compute.manager [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Took 30.15 seconds to build instance. 
[ 1748.219597] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "refresh_cache-8745aa7f-9848-4320-94b5-08b7e3bccf80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1748.254717] env[62619]: DEBUG oslo_vmware.api [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778204, 'name': PowerOffVM_Task, 'duration_secs': 0.337174} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.255123] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1748.255123] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1748.255406] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d67e64a8-896a-479c-870e-569e238ac18d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.409231] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1748.409675] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1748.409942] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleting the datastore file [datastore1] e34a8173-952b-4ddc-90cf-3681387733fa {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1748.411035] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a04a593c-5503-4ad4-bfd3-36ddd1b8402b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.418000] env[62619]: DEBUG oslo_vmware.api [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1748.418000] env[62619]: value = "task-1778206" [ 1748.418000] env[62619]: _type = "Task" [ 1748.418000] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.428727] env[62619]: DEBUG oslo_vmware.api [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778206, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.518638] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e19650da-cc3d-4350-be3e-dc776ce68206 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1748.518638] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1748.518842] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1748.584299] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1748.584864] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8101c30-ae4a-4c47-ab34-d187962121db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.599122] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1748.599122] env[62619]: value = "task-1778207" [ 1748.599122] env[62619]: _type = "Task" [ 1748.599122] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.610115] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778207, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.693660] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1748.693660] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5258ec6a-52a3-782c-6bc1-e5d60c86443a" [ 1748.693660] env[62619]: _type = "HttpNfcLease" [ 1748.693660] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1748.702902] env[62619]: DEBUG nova.network.neutron [req-47eb33d6-f996-4efe-bdcf-a7a954767add req-af0ae5b6-17ec-4980-bab9-e1319b34dec7 service nova] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Updated VIF entry in instance network info cache for port d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1748.703539] env[62619]: DEBUG nova.network.neutron [req-47eb33d6-f996-4efe-bdcf-a7a954767add req-af0ae5b6-17ec-4980-bab9-e1319b34dec7 service nova] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Updating instance_info_cache with network_info: [{"id": "d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09", "address": "fa:16:3e:e7:72:57", "network": {"id": "93cb677b-0f18-4676-a548-7174b284325e", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-819439326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c81be375fe34117838fcda4608d4091", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bde2f6cc-fb26-4d71-95a6-57d1ae1c4afd", "external-id": "nsx-vlan-transportzone-206", "segmentation_id": 206, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0aa15bf-fe", "ovs_interfaceid": "d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1748.706383] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a7a97174-73d6-403e-8a6c-e489bcac3865 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "b84dd91a-8e08-4476-9683-655357d18370" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.659s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1748.896135] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7896f72f-20cb-4d83-95e0-ae36d388688d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.905248] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2a97c6-7d8e-4a02-b82e-e31a8ced7cd4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.946260] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f61683-da36-45f3-8ace-65929d9b1c5d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.954293] env[62619]: DEBUG oslo_vmware.api [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 
tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778206, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.484913} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.957751] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1748.957751] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1748.957751] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1748.957751] env[62619]: INFO nova.compute.manager [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1748.957937] env[62619]: DEBUG oslo.service.loopingcall [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1748.958134] env[62619]: DEBUG nova.compute.manager [-] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1748.958258] env[62619]: DEBUG nova.network.neutron [-] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1748.961084] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6eafe0-e1f1-45a0-a3ec-c8c882d9bcf9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.977835] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1749.109436] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778207, 'name': PowerOffVM_Task, 'duration_secs': 0.398642} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.109732] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1749.109916] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating instance '7cb51b51-514d-4223-a82a-5cdbdab9482a' progress to 17 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1749.194032] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1749.194032] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5258ec6a-52a3-782c-6bc1-e5d60c86443a" [ 1749.194032] env[62619]: _type = "HttpNfcLease" [ 1749.194032] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1749.194032] env[62619]: DEBUG oslo_vmware.rw_handles [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1749.194032] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5258ec6a-52a3-782c-6bc1-e5d60c86443a" [ 1749.194032] env[62619]: _type = "HttpNfcLease" [ 1749.194032] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1749.197334] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e227b121-1d27-42b4-a052-93d445bad00d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.205905] env[62619]: DEBUG oslo_vmware.rw_handles [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fabebd-7288-03a7-4614-f5967ffc731b/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1749.206557] env[62619]: DEBUG oslo_vmware.rw_handles [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Creating HTTP connection to write to file with size = 31590912 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fabebd-7288-03a7-4614-f5967ffc731b/disk-0.vmdk. 
{{(pid=62619) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1749.211116] env[62619]: DEBUG oslo_concurrency.lockutils [req-47eb33d6-f996-4efe-bdcf-a7a954767add req-af0ae5b6-17ec-4980-bab9-e1319b34dec7 service nova] Releasing lock "refresh_cache-f3345332-5a22-4a1c-ac74-4e8f2ceb3f15" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1749.290885] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1e20e197-6088-4ad9-bc45-14a5ba809624 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.484197] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1749.619023] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1749.619023] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1749.619023] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1749.619023] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1749.619023] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
1749.619023] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1749.619023] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1749.619782] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1749.620364] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1749.621025] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1749.621426] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1749.629139] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6265e2d-da7f-468e-ba4b-493bc61d714e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.654758] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1749.654758] env[62619]: value = "task-1778208" [ 1749.654758] env[62619]: _type = "Task" [ 1749.654758] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.668843] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778208, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.990296] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1749.990296] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.535s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.990296] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.787s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.990296] env[62619]: DEBUG nova.objects.instance [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1750.169313] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778208, 'name': ReconfigVM_Task, 'duration_secs': 0.247209} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.172050] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating instance '7cb51b51-514d-4223-a82a-5cdbdab9482a' progress to 33 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1750.254587] env[62619]: DEBUG nova.network.neutron [-] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1750.338427] env[62619]: DEBUG nova.compute.manager [req-15e24d32-b627-4c78-b45d-e5f2676f98d0 req-38fc5482-023f-4284-8a87-75e77b0f4286 service nova] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Received event network-vif-deleted-6ccfd58b-04e7-42b0-b5a3-e63d420ab341 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1750.435793] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a3b81a-e957-ce6d-8512-fbd421fd9844/disk-0.vmdk. 
{{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1750.437274] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5952fd48-8d16-4082-bff5-2a1931697f21 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.450320] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a3b81a-e957-ce6d-8512-fbd421fd9844/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1750.450645] env[62619]: ERROR oslo_vmware.rw_handles [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a3b81a-e957-ce6d-8512-fbd421fd9844/disk-0.vmdk due to incomplete transfer. [ 1750.451052] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-dab835df-d0a9-43b1-b394-823deb29459a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.462950] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a3b81a-e957-ce6d-8512-fbd421fd9844/disk-0.vmdk. {{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1750.463282] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Uploaded image d1791572-abf0-49e9-9ccd-ae11e1d9d561 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1750.469022] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1750.469022] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7fdd7ae7-ad16-4260-85b6-3ff6c13e9b22 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.474105] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1750.474105] env[62619]: value = "task-1778209" [ 1750.474105] env[62619]: _type = "Task" [ 1750.474105] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.490032] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778209, 'name': Destroy_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.682630] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1750.682869] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1750.683029] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1750.683212] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1750.683348] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1750.683487] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1750.683882] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1750.683882] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 
tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1750.685461] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1750.685461] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1750.685629] env[62619]: DEBUG nova.virt.hardware [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1750.692499] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Reconfiguring VM instance instance-00000055 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1750.695311] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb49c663-744e-4371-a474-8c7ccfc77d6b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.720332] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1750.720332] env[62619]: value = "task-1778210" [ 1750.720332] env[62619]: _type = "Task" [ 1750.720332] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.735997] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778210, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.758590] env[62619]: INFO nova.compute.manager [-] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Took 1.80 seconds to deallocate network for instance. [ 1750.834160] env[62619]: DEBUG oslo_vmware.rw_handles [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Completed reading data from the image iterator. 
{{(pid=62619) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1750.834441] env[62619]: DEBUG oslo_vmware.rw_handles [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fabebd-7288-03a7-4614-f5967ffc731b/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1750.835452] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51a531e-0bd7-4b6c-8153-3366e4801b44 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.843808] env[62619]: DEBUG oslo_vmware.rw_handles [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fabebd-7288-03a7-4614-f5967ffc731b/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1750.843808] env[62619]: DEBUG oslo_vmware.rw_handles [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fabebd-7288-03a7-4614-f5967ffc731b/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1750.843808] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-6cf8afb3-d702-489b-9e30-e2feb3f1d0c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.985518] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778209, 'name': Destroy_Task} progress is 33%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.005580] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3d405721-0b52-480d-bd07-5a995d0794a3 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.006812] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.758s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.007064] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.009402] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.337s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.010895] env[62619]: INFO nova.compute.claims [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1751.042419] env[62619]: INFO nova.scheduler.client.report [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Deleted allocations for instance b1c3c213-599d-4cab-8224-d87467d774c9 [ 1751.232473] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778210, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.267335] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.317414] env[62619]: DEBUG oslo_vmware.rw_handles [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fabebd-7288-03a7-4614-f5967ffc731b/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1751.317414] env[62619]: INFO nova.virt.vmwareapi.images [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Downloaded image file data 034267b5-5870-4201-8726-91111429c131 [ 1751.318424] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00019834-1070-4c46-ae75-7d697b3b8069 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.337774] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f14842a2-5f88-4ff6-92f2-575dad501fcb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.388186] env[62619]: INFO nova.virt.vmwareapi.images [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] The imported VM was unregistered [ 1751.390052] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Caching image {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1751.390312] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Creating directory with path [datastore1] devstack-image-cache_base/034267b5-5870-4201-8726-91111429c131 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1751.390597] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61609023-c273-475b-88b7-57eeda0114ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.420793] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Created directory with path [datastore1] devstack-image-cache_base/034267b5-5870-4201-8726-91111429c131 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1751.421013] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_40470587-01ec-4065-a7d8-15f7897d698f/OSTACK_IMG_40470587-01ec-4065-a7d8-15f7897d698f.vmdk to [datastore1] devstack-image-cache_base/034267b5-5870-4201-8726-91111429c131/034267b5-5870-4201-8726-91111429c131.vmdk. 
{{(pid=62619) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1751.421298] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-e6489388-42c2-4e10-871b-2e8025367c48 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.429111] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1751.429111] env[62619]: value = "task-1778212" [ 1751.429111] env[62619]: _type = "Task" [ 1751.429111] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.435960] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778212, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.486659] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778209, 'name': Destroy_Task, 'duration_secs': 0.758495} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.486921] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Destroyed the VM [ 1751.487476] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1751.487731] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5ec7ef93-5818-4f2b-ab0a-f129a362a6ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.493793] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1751.493793] env[62619]: value = "task-1778213" [ 1751.493793] env[62619]: _type = "Task" [ 1751.493793] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.501951] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778213, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.550132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df617536-fc67-4841-a9d2-4c6d830c1d9b tempest-ServersTestBootFromVolume-1840168648 tempest-ServersTestBootFromVolume-1840168648-project-member] Lock "b1c3c213-599d-4cab-8224-d87467d774c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.002s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.733049] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778210, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.811924] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1751.812285] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-07ef50b8-3895-45c5-b69a-4c3432590dd6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.819971] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1751.819971] env[62619]: value = "task-1778214" [ 1751.819971] env[62619]: _type = "Task" [ 1751.819971] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.828561] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778214, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.940037] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778212, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.005866] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778213, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.237989] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778210, 'name': ReconfigVM_Task, 'duration_secs': 1.224156} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.238484] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Reconfigured VM instance instance-00000055 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1752.239379] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141de540-a2e7-4a1d-8aec-cb25ed2aeb8c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.271233] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 7cb51b51-514d-4223-a82a-5cdbdab9482a/7cb51b51-514d-4223-a82a-5cdbdab9482a.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1752.274383] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fe87aa4-ffc4-48dc-b204-96f64bcba19d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.297116] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1752.297116] env[62619]: value = "task-1778215" [ 1752.297116] env[62619]: _type = "Task" [ 1752.297116] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.315323] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778215, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.330460] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778214, 'name': PowerOffVM_Task, 'duration_secs': 0.213082} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.333791] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1752.335227] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1decee57-dc39-4eaf-9894-09bacdc3ea60 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.361108] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda2b0db-19d8-4eba-a43f-39cced6e1467 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.408563] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1752.408883] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec7cc937-0a1f-4ccb-aab9-dfef676d1a40 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.420164] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1752.420164] env[62619]: value = "task-1778216" [ 1752.420164] env[62619]: _type = "Task" [ 1752.420164] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.433044] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1752.433228] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1752.433485] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.433626] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.433795] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1752.435144] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3da0f69-cf36-4b12-ad0c-cf8103690bc6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.437585] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5246d410-31fa-4b05-b873-ed2e9fb33559 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.442858] env[62619]: DEBUG nova.compute.manager [req-887a59e4-c2aa-4985-90a9-ff70c79b027b req-fd326a8a-1763-4ff8-b54c-6abc0dc74ea4 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Received event network-changed-47138162-63de-413e-8a1d-767355e11190 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1752.443078] env[62619]: DEBUG nova.compute.manager [req-887a59e4-c2aa-4985-90a9-ff70c79b027b req-fd326a8a-1763-4ff8-b54c-6abc0dc74ea4 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Refreshing instance network info cache due to event network-changed-47138162-63de-413e-8a1d-767355e11190. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1752.443362] env[62619]: DEBUG oslo_concurrency.lockutils [req-887a59e4-c2aa-4985-90a9-ff70c79b027b req-fd326a8a-1763-4ff8-b54c-6abc0dc74ea4 service nova] Acquiring lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.443523] env[62619]: DEBUG oslo_concurrency.lockutils [req-887a59e4-c2aa-4985-90a9-ff70c79b027b req-fd326a8a-1763-4ff8-b54c-6abc0dc74ea4 service nova] Acquired lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.443687] env[62619]: DEBUG nova.network.neutron [req-887a59e4-c2aa-4985-90a9-ff70c79b027b req-fd326a8a-1763-4ff8-b54c-6abc0dc74ea4 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Refreshing network info cache for port 47138162-63de-413e-8a1d-767355e11190 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1752.449752] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778212, 'name': MoveVirtualDisk_Task} progress is 26%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.454671] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ffe9a8-6c25-4889-88ae-c6dc6545704f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.493721] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38c6d0a-dbed-4544-b23d-b0409f2688c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.496538] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1752.496725] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1752.497892] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-144c15de-21b6-4d9c-ae91-aac14a9c661f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.508803] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1752.508803] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d51fca-aa02-01a9-a7aa-44a25d08ceda" [ 1752.508803] env[62619]: _type = "Task" [ 1752.508803] env[62619]: } to complete. 
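Editor's note: the Acquiring lock "refresh_cache-<instance>" / Acquired lock pair around the network-changed event above is the usual per-instance serialization of the Neutron info-cache refresh. A minimal sketch of the pattern, assuming a hypothetical refresh_port_info() helper (oslo.concurrency's lockutils.lock is a real context manager; everything else here is illustrative):

from oslo_concurrency import lockutils

def handle_network_changed(instance_uuid, port_id, refresh_port_info):
    # One named lock per instance keeps concurrent event handlers from
    # rebuilding the same instance_info_cache at the same time.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # Stand-in for the Neutron call that rebuilds the cached
        # network_info for this port.
        refresh_port_info(instance_uuid, port_id)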
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.514180] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778213, 'name': RemoveSnapshot_Task, 'duration_secs': 0.784569} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.518370] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1752.518370] env[62619]: DEBUG nova.compute.manager [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1752.519026] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b884c8f7-1a98-4b84-b6a3-ade4eed10b9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.522089] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8b35ef-0547-4938-bb12-941f8b7d6ebf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.549647] env[62619]: DEBUG nova.compute.provider_tree [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1752.550272] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d51fca-aa02-01a9-a7aa-44a25d08ceda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.810217] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778215, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.943967] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778212, 'name': MoveVirtualDisk_Task} progress is 46%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.027276] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d51fca-aa02-01a9-a7aa-44a25d08ceda, 'name': SearchDatastore_Task, 'duration_secs': 0.095903} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.031209] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff72145c-c0a3-4aa7-a854-c3cead73a226 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.040558] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1753.040558] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e7c686-cf22-28a3-051d-7208769fd0f1" [ 1753.040558] env[62619]: _type = "Task" [ 1753.040558] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.058614] env[62619]: DEBUG nova.scheduler.client.report [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1753.062690] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e7c686-cf22-28a3-051d-7208769fd0f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.063999] env[62619]: INFO nova.compute.manager [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Shelve offloading [ 1753.312596] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778215, 'name': ReconfigVM_Task, 'duration_secs': 1.007488} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.312921] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 7cb51b51-514d-4223-a82a-5cdbdab9482a/7cb51b51-514d-4223-a82a-5cdbdab9482a.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1753.313252] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating instance '7cb51b51-514d-4223-a82a-5cdbdab9482a' progress to 50 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1753.444378] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778212, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.556015] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e7c686-cf22-28a3-051d-7208769fd0f1, 'name': SearchDatastore_Task, 'duration_secs': 0.093358} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.556289] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.556640] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8745aa7f-9848-4320-94b5-08b7e3bccf80/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk. 
{{(pid=62619) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1753.556941] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-64f93436-fb92-469f-99b2-af49f6bddc7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.564951] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.555s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.565509] env[62619]: DEBUG nova.compute.manager [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1753.569740] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.108s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.570078] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.572207] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.378s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.576268] env[62619]: INFO nova.compute.claims [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1753.579616] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1753.580104] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1753.580104] env[62619]: value = "task-1778217" [ 1753.580104] env[62619]: _type = "Task" [ 1753.580104] env[62619]: } to complete. 
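Editor's note: the sequence above (acquire the per-image cache lock, ensure devstack-image-cache_base exists, SearchDatastore for the cached VMDK, release the lock, then copy it next to the instance as ...-rescue.vmdk) is the image-cache "fetch if missing, then copy" pattern. A condensed sketch under the assumption of hypothetical ds_exists/ds_mkdir/ds_copy/fetch_image helpers standing in for the datastore and Glance operations seen in the log:

from oslo_concurrency import lockutils

def ensure_cached_and_copy(image_id, cache_dir, dest_path,
                           ds_exists, ds_mkdir, ds_copy, fetch_image):
    # All ds_* callables and fetch_image are hypothetical stand-ins.
    cached_vmdk = '%s/%s/%s.vmdk' % (cache_dir, image_id, image_id)
    with lockutils.lock(cached_vmdk):
        ds_mkdir(cache_dir)              # "Creating directory ... devstack-image-cache_base"
        if not ds_exists(cached_vmdk):   # SearchDatastore_Task
            fetch_image(image_id, cached_vmdk)
    # Copy from the shared cache into the instance directory
    # (the CopyVirtualDisk_Task in the log above).
    ds_copy(cached_vmdk, dest_path)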
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.580885] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b89e061-f491-43c2-8229-304193ae820f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.598767] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1753.598767] env[62619]: value = "task-1778218" [ 1753.598767] env[62619]: _type = "Task" [ 1753.598767] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.602316] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778217, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.614971] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1753.615302] env[62619]: DEBUG nova.compute.manager [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1753.616151] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4485283-ef7a-46a5-8eeb-1bce534144e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.621191] env[62619]: INFO nova.scheduler.client.report [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Deleted allocations for instance 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9 [ 1753.628300] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.628460] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.628643] env[62619]: DEBUG nova.network.neutron [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Building network 
info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1753.790972] env[62619]: DEBUG nova.network.neutron [req-887a59e4-c2aa-4985-90a9-ff70c79b027b req-fd326a8a-1763-4ff8-b54c-6abc0dc74ea4 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Updated VIF entry in instance network info cache for port 47138162-63de-413e-8a1d-767355e11190. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1753.791455] env[62619]: DEBUG nova.network.neutron [req-887a59e4-c2aa-4985-90a9-ff70c79b027b req-fd326a8a-1763-4ff8-b54c-6abc0dc74ea4 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Updating instance_info_cache with network_info: [{"id": "47138162-63de-413e-8a1d-767355e11190", "address": "fa:16:3e:02:cd:09", "network": {"id": "014f0330-ae80-41d1-8155-a6a0fbf47197", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1852196471-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "079ddd8f5dc14fa699b4961995733f95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47138162-63", "ovs_interfaceid": "47138162-63de-413e-8a1d-767355e11190", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1753.826297] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40fedae8-0f8f-4f50-a499-402f96fa7f4f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.848811] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f59d70-3231-4658-bab3-2a30c165d512 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.878189] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating instance '7cb51b51-514d-4223-a82a-5cdbdab9482a' progress to 67 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1753.945394] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778212, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.084032] env[62619]: DEBUG nova.compute.utils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1754.088125] env[62619]: DEBUG nova.compute.manager [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1754.088125] env[62619]: DEBUG nova.network.neutron [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1754.102120] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778217, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.135367] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3f3790c-b590-410b-9397-f273acde61d6 tempest-ServersV294TestFqdnHostnames-1377512523 tempest-ServersV294TestFqdnHostnames-1377512523-project-member] Lock "7ee5f09f-e27b-4373-88ce-8cff2f55a2b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.807s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.163411] env[62619]: DEBUG nova.policy [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd106b23f779045f788b2536afd8c623d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2377a52a195d4f0b9181207ab5741734', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1754.293944] env[62619]: DEBUG oslo_concurrency.lockutils [req-887a59e4-c2aa-4985-90a9-ff70c79b027b req-fd326a8a-1763-4ff8-b54c-6abc0dc74ea4 service nova] Releasing lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.447705] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778212, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.95722} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.448171] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_40470587-01ec-4065-a7d8-15f7897d698f/OSTACK_IMG_40470587-01ec-4065-a7d8-15f7897d698f.vmdk to [datastore1] devstack-image-cache_base/034267b5-5870-4201-8726-91111429c131/034267b5-5870-4201-8726-91111429c131.vmdk. [ 1754.448691] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Cleaning up location [datastore1] OSTACK_IMG_40470587-01ec-4065-a7d8-15f7897d698f {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1754.448846] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_40470587-01ec-4065-a7d8-15f7897d698f {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1754.452217] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be62d749-ae77-4c18-9b89-2e294cb583eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.460749] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1754.460749] env[62619]: value = "task-1778219" [ 1754.460749] env[62619]: _type = "Task" [ 1754.460749] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.473955] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778219, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.491352] env[62619]: DEBUG nova.network.neutron [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Port 9567749e-9f9f-4b05-a445-3099dd2cdff8 binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1754.534754] env[62619]: DEBUG nova.network.neutron [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Updating instance_info_cache with network_info: [{"id": "89e91bb9-2bd5-4385-b3dd-cee4612bb166", "address": "fa:16:3e:78:49:c7", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e91bb9-2b", "ovs_interfaceid": "89e91bb9-2bd5-4385-b3dd-cee4612bb166", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.580339] env[62619]: DEBUG nova.compute.manager [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Received event network-changed-47138162-63de-413e-8a1d-767355e11190 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1754.580548] env[62619]: DEBUG nova.compute.manager [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Refreshing instance network info cache due to event network-changed-47138162-63de-413e-8a1d-767355e11190. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1754.580767] env[62619]: DEBUG oslo_concurrency.lockutils [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] Acquiring lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.580924] env[62619]: DEBUG oslo_concurrency.lockutils [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] Acquired lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.581100] env[62619]: DEBUG nova.network.neutron [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Refreshing network info cache for port 47138162-63de-413e-8a1d-767355e11190 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1754.589016] env[62619]: DEBUG nova.compute.manager [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1754.618085] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778217, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.830282] env[62619]: DEBUG nova.network.neutron [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Successfully created port: e6fbdc52-5c2d-4d4f-9c92-77d76129374f {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1754.917931] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1ef4ff-5912-486b-9dc8-70b5c07efe61 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.927705] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2efbc7-e73d-4a56-9524-e52e986575ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.960142] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d5a717-4f59-4fbf-b2a9-27297388a3bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.974763] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8720265-a552-4b06-bb7e-ca7e67f6a584 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.978838] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': 
task-1778219, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227563} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.979111] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1754.979276] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Releasing lock "[datastore1] devstack-image-cache_base/034267b5-5870-4201-8726-91111429c131/034267b5-5870-4201-8726-91111429c131.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.979523] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/034267b5-5870-4201-8726-91111429c131/034267b5-5870-4201-8726-91111429c131.vmdk to [datastore1] 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85/4cd6dafd-4f19-4d0f-8e07-8171a6a71e85.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1754.980249] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-395bc9c6-e4ce-48d0-a86c-17a1d4aa35d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.990313] env[62619]: DEBUG nova.compute.provider_tree [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1755.004526] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1755.004526] env[62619]: value = "task-1778220" [ 1755.004526] env[62619]: _type = "Task" [ 1755.004526] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.015634] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778220, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.037731] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.108648] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778217, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.304162} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.109007] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8745aa7f-9848-4320-94b5-08b7e3bccf80/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk. [ 1755.109936] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55563d6f-6f54-432d-b6c2-430b30869875 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.144920] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 8745aa7f-9848-4320-94b5-08b7e3bccf80/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1755.145357] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89f0c395-6064-4848-9a20-c02ca0b57f97 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.169989] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1755.169989] env[62619]: value = "task-1778221" [ 1755.169989] env[62619]: _type = "Task" [ 1755.169989] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.184738] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778221, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.531128] env[62619]: DEBUG oslo_concurrency.lockutils [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "7cb51b51-514d-4223-a82a-5cdbdab9482a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1755.531128] env[62619]: DEBUG oslo_concurrency.lockutils [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "7cb51b51-514d-4223-a82a-5cdbdab9482a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1755.531128] env[62619]: DEBUG oslo_concurrency.lockutils [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "7cb51b51-514d-4223-a82a-5cdbdab9482a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1755.532034] env[62619]: ERROR nova.scheduler.client.report [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [req-ef17424f-ccef-47e9-9ced-48a1d9f03492] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ef17424f-ccef-47e9-9ced-48a1d9f03492"}]} [ 1755.541049] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778220, 'name': CopyVirtualDisk_Task} progress is 15%. 
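Editor's note: the 409 "resource provider generation conflict" above, followed by "Refreshing inventories for resource provider ...", is placement's optimistic concurrency control: every inventory update carries the provider generation, and a conflict means another writer bumped it first, so the client re-reads and retries. A minimal sketch of that retry loop (the placement_get/placement_put callables and their payloads are illustrative stand-ins, not the real report-client API):

def update_inventory_with_retry(provider_uuid, new_inventory,
                                placement_get, placement_put, max_attempts=3):
    for _attempt in range(max_attempts):
        # Re-read the provider to pick up the current generation.
        current = placement_get('/resource_providers/%s/inventories' % provider_uuid)
        payload = {
            'resource_provider_generation': current['resource_provider_generation'],
            'inventories': new_inventory,
        }
        status, _body = placement_put(
            '/resource_providers/%s/inventories' % provider_uuid, payload)
        if status == 200:
            return True
        if status != 409:
            raise RuntimeError('inventory update failed: %s' % status)
        # 409 placement.concurrent_update: another writer bumped the
        # generation; loop around, refresh, and try again.
    return False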
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.553235] env[62619]: DEBUG nova.scheduler.client.report [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1755.579497] env[62619]: DEBUG nova.scheduler.client.report [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1755.579781] env[62619]: DEBUG nova.compute.provider_tree [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1755.597613] env[62619]: DEBUG nova.scheduler.client.report [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1755.609243] env[62619]: DEBUG nova.network.neutron [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Updated VIF entry in instance network info cache for port 47138162-63de-413e-8a1d-767355e11190. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1755.609722] env[62619]: DEBUG nova.network.neutron [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Updating instance_info_cache with network_info: [{"id": "47138162-63de-413e-8a1d-767355e11190", "address": "fa:16:3e:02:cd:09", "network": {"id": "014f0330-ae80-41d1-8155-a6a0fbf47197", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1852196471-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "079ddd8f5dc14fa699b4961995733f95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47138162-63", "ovs_interfaceid": "47138162-63de-413e-8a1d-767355e11190", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1755.614841] env[62619]: DEBUG nova.compute.manager [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1755.629567] env[62619]: DEBUG nova.scheduler.client.report [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1755.669455] env[62619]: DEBUG nova.virt.hardware [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1755.669719] env[62619]: DEBUG nova.virt.hardware [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1755.669914] env[62619]: DEBUG nova.virt.hardware [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1755.670129] env[62619]: DEBUG nova.virt.hardware [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1755.670276] env[62619]: DEBUG nova.virt.hardware [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1755.670420] env[62619]: DEBUG nova.virt.hardware [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1755.670629] env[62619]: DEBUG nova.virt.hardware [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1755.670827] env[62619]: DEBUG nova.virt.hardware [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1755.671104] env[62619]: DEBUG nova.virt.hardware [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1755.671166] env[62619]: DEBUG nova.virt.hardware [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1755.671333] env[62619]: DEBUG nova.virt.hardware [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1755.672346] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2689f0-fa55-496b-835e-40be5d6e450a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.688067] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6963d8a9-d8d5-4964-92eb-20f471aa7110 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.697994] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778221, 'name': ReconfigVM_Task} progress is 14%. 
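Editor's note: the "Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies" lines above enumerate (sockets, cores, threads) combinations whose product equals the flavor's vCPU count, capped by the flavor/image limits. A simplified sketch of that enumeration (Nova's real hardware.py applies more constraints and preferences; this only shows the core idea):

import itertools

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) triples whose product is exactly
    # the requested vCPU count, within the given limits.
    topologies = []
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            topologies.append((sockets, cores, threads))
    return topologies

# For the 1-vCPU flavor in the log this yields a single topology: [(1, 1, 1)].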
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.874020] env[62619]: DEBUG nova.compute.manager [req-bf9dd846-dc15-42b1-840d-66dc3949a388 req-21a9b5e9-b7d0-4ec7-8f6c-d43e09be429d service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Received event network-vif-unplugged-89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1755.874867] env[62619]: DEBUG oslo_concurrency.lockutils [req-bf9dd846-dc15-42b1-840d-66dc3949a388 req-21a9b5e9-b7d0-4ec7-8f6c-d43e09be429d service nova] Acquiring lock "5cf7ca57-351f-48ab-8758-b30f50cd607f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1755.874867] env[62619]: DEBUG oslo_concurrency.lockutils [req-bf9dd846-dc15-42b1-840d-66dc3949a388 req-21a9b5e9-b7d0-4ec7-8f6c-d43e09be429d service nova] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1755.874867] env[62619]: DEBUG oslo_concurrency.lockutils [req-bf9dd846-dc15-42b1-840d-66dc3949a388 req-21a9b5e9-b7d0-4ec7-8f6c-d43e09be429d service nova] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1755.875560] env[62619]: DEBUG nova.compute.manager [req-bf9dd846-dc15-42b1-840d-66dc3949a388 req-21a9b5e9-b7d0-4ec7-8f6c-d43e09be429d service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] No waiting events found dispatching network-vif-unplugged-89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1755.875560] env[62619]: WARNING nova.compute.manager [req-bf9dd846-dc15-42b1-840d-66dc3949a388 req-21a9b5e9-b7d0-4ec7-8f6c-d43e09be429d service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Received unexpected event network-vif-unplugged-89e91bb9-2bd5-4385-b3dd-cee4612bb166 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1755.928512] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1755.929724] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f249a00b-11cf-47fd-8373-d5ef0b77ddeb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.939457] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1755.939457] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afae1692-3360-43a9-ae36-079080d0d185 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.011779] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b965a67-dd63-40be-a874-0844e71d6250 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.025423] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778220, 'name': CopyVirtualDisk_Task} progress is 35%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.028990] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f93a5f-c49c-428c-9102-dfb5401bf115 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.066065] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6e3cf7-2cb1-40ad-9b27-bb9f69ac3d7c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.079869] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-636fd25b-1d22-4eeb-86ee-8d7e59b80719 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.098472] env[62619]: DEBUG nova.compute.provider_tree [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1756.102905] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1756.102905] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1756.102905] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleting the datastore file [datastore1] 5cf7ca57-351f-48ab-8758-b30f50cd607f {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1756.102905] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2b8aa30-34c7-40a4-b7c0-dbc58108904b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.110651] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1756.110651] env[62619]: value = "task-1778223" [ 1756.110651] env[62619]: _type = "Task" [ 1756.110651] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.114735] env[62619]: DEBUG oslo_concurrency.lockutils [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] Releasing lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1756.114984] env[62619]: DEBUG nova.compute.manager [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Received event network-changed-78e09ad3-db58-4374-9332-13a1881ffdfd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1756.115164] env[62619]: DEBUG nova.compute.manager [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Refreshing instance network info cache due to event network-changed-78e09ad3-db58-4374-9332-13a1881ffdfd. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1756.115371] env[62619]: DEBUG oslo_concurrency.lockutils [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] Acquiring lock "refresh_cache-b84dd91a-8e08-4476-9683-655357d18370" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1756.115508] env[62619]: DEBUG oslo_concurrency.lockutils [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] Acquired lock "refresh_cache-b84dd91a-8e08-4476-9683-655357d18370" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1756.115668] env[62619]: DEBUG nova.network.neutron [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Refreshing network info cache for port 78e09ad3-db58-4374-9332-13a1881ffdfd {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1756.122654] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778223, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.182391] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778221, 'name': ReconfigVM_Task, 'duration_secs': 0.762449} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.182879] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 8745aa7f-9848-4320-94b5-08b7e3bccf80/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1756.184068] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ac5964-89ec-412c-9ad4-424c8f109ff7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.212359] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18768e48-46f2-4029-a131-783a18b1153a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.230162] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1756.230162] env[62619]: value = "task-1778224" [ 1756.230162] env[62619]: _type = "Task" [ 1756.230162] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.242416] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778224, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.376770] env[62619]: DEBUG oslo_concurrency.lockutils [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "b84dd91a-8e08-4476-9683-655357d18370" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.377071] env[62619]: DEBUG oslo_concurrency.lockutils [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "b84dd91a-8e08-4476-9683-655357d18370" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.377286] env[62619]: DEBUG oslo_concurrency.lockutils [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "b84dd91a-8e08-4476-9683-655357d18370-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.377468] env[62619]: DEBUG oslo_concurrency.lockutils [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "b84dd91a-8e08-4476-9683-655357d18370-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.377638] env[62619]: DEBUG oslo_concurrency.lockutils [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "b84dd91a-8e08-4476-9683-655357d18370-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.379916] env[62619]: INFO nova.compute.manager [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Terminating instance [ 1756.528595] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778220, 'name': CopyVirtualDisk_Task} progress is 54%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.581433] env[62619]: DEBUG oslo_concurrency.lockutils [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "refresh_cache-7cb51b51-514d-4223-a82a-5cdbdab9482a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1756.581729] env[62619]: DEBUG oslo_concurrency.lockutils [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "refresh_cache-7cb51b51-514d-4223-a82a-5cdbdab9482a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1756.581790] env[62619]: DEBUG nova.network.neutron [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1756.628706] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778223, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.656766] env[62619]: DEBUG nova.scheduler.client.report [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 130 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1756.657063] env[62619]: DEBUG nova.compute.provider_tree [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 130 to 131 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1756.657241] env[62619]: DEBUG nova.compute.provider_tree [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1756.752711] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778224, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.889067] env[62619]: DEBUG nova.compute.manager [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1756.889067] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1756.889067] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6053265-980f-48aa-965f-b2847e976cb8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.901961] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1756.903569] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f69e5df-fbe9-464e-996f-9f7727984a3f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.923640] env[62619]: DEBUG oslo_vmware.api [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1756.923640] env[62619]: value = "task-1778225" [ 1756.923640] env[62619]: _type = "Task" [ 1756.923640] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.937978] env[62619]: DEBUG oslo_vmware.api [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778225, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.035823] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778220, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.053679] env[62619]: DEBUG nova.compute.manager [req-c20e603f-db8d-4f0b-8fad-eb2e1653e510 req-db67af0b-b042-45de-8fd6-4d2f7411d5fe service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Received event network-changed-78e09ad3-db58-4374-9332-13a1881ffdfd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1757.053936] env[62619]: DEBUG nova.compute.manager [req-c20e603f-db8d-4f0b-8fad-eb2e1653e510 req-db67af0b-b042-45de-8fd6-4d2f7411d5fe service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Refreshing instance network info cache due to event network-changed-78e09ad3-db58-4374-9332-13a1881ffdfd. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1757.054194] env[62619]: DEBUG oslo_concurrency.lockutils [req-c20e603f-db8d-4f0b-8fad-eb2e1653e510 req-db67af0b-b042-45de-8fd6-4d2f7411d5fe service nova] Acquiring lock "refresh_cache-b84dd91a-8e08-4476-9683-655357d18370" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.127322] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778223, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.134364] env[62619]: DEBUG nova.network.neutron [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Successfully updated port: e6fbdc52-5c2d-4d4f-9c92-77d76129374f {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1757.165270] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.592s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.165270] env[62619]: DEBUG nova.compute.manager [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1757.170339] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.902s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.170616] env[62619]: DEBUG nova.objects.instance [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lazy-loading 'resources' on Instance uuid e34a8173-952b-4ddc-90cf-3681387733fa {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1757.245383] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778224, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.317081] env[62619]: DEBUG nova.network.neutron [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Updated VIF entry in instance network info cache for port 78e09ad3-db58-4374-9332-13a1881ffdfd. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1757.317504] env[62619]: DEBUG nova.network.neutron [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Updating instance_info_cache with network_info: [{"id": "78e09ad3-db58-4374-9332-13a1881ffdfd", "address": "fa:16:3e:06:ad:e4", "network": {"id": "014f0330-ae80-41d1-8155-a6a0fbf47197", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1852196471-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "079ddd8f5dc14fa699b4961995733f95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78e09ad3-db", "ovs_interfaceid": "78e09ad3-db58-4374-9332-13a1881ffdfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.410905] env[62619]: DEBUG nova.network.neutron [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating instance_info_cache with network_info: [{"id": "9567749e-9f9f-4b05-a445-3099dd2cdff8", "address": "fa:16:3e:7e:df:71", "network": {"id": 
"8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9567749e-9f", "ovs_interfaceid": "9567749e-9f9f-4b05-a445-3099dd2cdff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.437174] env[62619]: DEBUG oslo_vmware.api [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778225, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.528496] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778220, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.624100] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778223, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.643053] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "refresh_cache-88f79718-97d0-432c-8515-b60ab3dfd7e0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.643138] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "refresh_cache-88f79718-97d0-432c-8515-b60ab3dfd7e0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.643273] env[62619]: DEBUG nova.network.neutron [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1757.678264] env[62619]: DEBUG nova.compute.utils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1757.682771] env[62619]: DEBUG nova.compute.manager [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1757.683022] env[62619]: DEBUG nova.network.neutron [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1757.742491] env[62619]: DEBUG nova.policy [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5983ae7d9804a0785935f7c490240a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b73b01174a314190bcdd93287203adff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1757.753212] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778224, 'name': ReconfigVM_Task, 'duration_secs': 1.23649} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.753684] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1757.754519] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0609ddf-2544-4158-babf-52f1561cb16c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.766331] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1757.766331] env[62619]: value = "task-1778226" [ 1757.766331] env[62619]: _type = "Task" [ 1757.766331] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.779530] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778226, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.821648] env[62619]: DEBUG oslo_concurrency.lockutils [req-d7efd0be-30ea-40b5-822d-958a90988b72 req-936683a8-446a-4f4a-934e-5e9fcfb939df service nova] Releasing lock "refresh_cache-b84dd91a-8e08-4476-9683-655357d18370" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.822182] env[62619]: DEBUG oslo_concurrency.lockutils [req-c20e603f-db8d-4f0b-8fad-eb2e1653e510 req-db67af0b-b042-45de-8fd6-4d2f7411d5fe service nova] Acquired lock "refresh_cache-b84dd91a-8e08-4476-9683-655357d18370" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.822378] env[62619]: DEBUG nova.network.neutron [req-c20e603f-db8d-4f0b-8fad-eb2e1653e510 req-db67af0b-b042-45de-8fd6-4d2f7411d5fe service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Refreshing network info cache for port 78e09ad3-db58-4374-9332-13a1881ffdfd {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1757.913603] env[62619]: DEBUG oslo_concurrency.lockutils [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "refresh_cache-7cb51b51-514d-4223-a82a-5cdbdab9482a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.940201] env[62619]: DEBUG oslo_vmware.api [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778225, 'name': PowerOffVM_Task, 'duration_secs': 0.790162} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.940471] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1757.941528] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1757.945303] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd9b7622-bf5b-433c-94c8-3aead15aaa3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.989225] env[62619]: DEBUG nova.compute.manager [req-102a8a1c-9e5e-436d-a651-1a7904019a92 req-f78dfdd9-b444-4420-80c2-6c42b2f428ba service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Received event network-vif-plugged-e6fbdc52-5c2d-4d4f-9c92-77d76129374f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1757.989225] env[62619]: DEBUG oslo_concurrency.lockutils [req-102a8a1c-9e5e-436d-a651-1a7904019a92 req-f78dfdd9-b444-4420-80c2-6c42b2f428ba service nova] Acquiring lock "88f79718-97d0-432c-8515-b60ab3dfd7e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.991046] env[62619]: DEBUG oslo_concurrency.lockutils [req-102a8a1c-9e5e-436d-a651-1a7904019a92 req-f78dfdd9-b444-4420-80c2-6c42b2f428ba service nova] Lock "88f79718-97d0-432c-8515-b60ab3dfd7e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.991346] env[62619]: DEBUG oslo_concurrency.lockutils [req-102a8a1c-9e5e-436d-a651-1a7904019a92 req-f78dfdd9-b444-4420-80c2-6c42b2f428ba service nova] Lock "88f79718-97d0-432c-8515-b60ab3dfd7e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.991554] env[62619]: DEBUG nova.compute.manager [req-102a8a1c-9e5e-436d-a651-1a7904019a92 req-f78dfdd9-b444-4420-80c2-6c42b2f428ba service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] No waiting events found dispatching network-vif-plugged-e6fbdc52-5c2d-4d4f-9c92-77d76129374f {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1757.991728] env[62619]: WARNING nova.compute.manager [req-102a8a1c-9e5e-436d-a651-1a7904019a92 req-f78dfdd9-b444-4420-80c2-6c42b2f428ba service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Received unexpected event network-vif-plugged-e6fbdc52-5c2d-4d4f-9c92-77d76129374f for instance with vm_state building and task_state spawning. 
[ 1758.031771] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778220, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.618536} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.032016] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/034267b5-5870-4201-8726-91111429c131/034267b5-5870-4201-8726-91111429c131.vmdk to [datastore1] 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85/4cd6dafd-4f19-4d0f-8e07-8171a6a71e85.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1758.032822] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e101d7ee-1bf1-451a-a14d-dc0f6dc112bb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.036907] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ca87e2-c4f1-4f94-a485-5c504d6bac96 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.053371] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f575d377-542b-4485-a8bd-17567242e363 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.067809] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85/4cd6dafd-4f19-4d0f-8e07-8171a6a71e85.vmdk or device None with type streamOptimized {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1758.067809] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e8f4651-6876-45d7-9df8-77d55a2a2617 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.084841] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1758.084841] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1758.084841] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] 
Deleting the datastore file [datastore1] b84dd91a-8e08-4476-9683-655357d18370 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1758.084841] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00bf2b15-7065-46d2-ac42-0b41c2c92391 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.111728] env[62619]: DEBUG nova.network.neutron [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Successfully created port: 0a3f2ddf-2344-4713-a719-025a5945f591 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1758.116458] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f51f7d-bbea-41e9-a9b4-63fd528fc585 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.120382] env[62619]: DEBUG oslo_vmware.api [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1758.120382] env[62619]: value = "task-1778228" [ 1758.120382] env[62619]: _type = "Task" [ 1758.120382] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.120382] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1758.120382] env[62619]: value = "task-1778229" [ 1758.120382] env[62619]: _type = "Task" [ 1758.120382] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.136755] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b496851-d6d6-4379-acc9-f947fbc717b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.154700] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778229, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.154983] env[62619]: DEBUG oslo_vmware.api [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778223, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.557884} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.155202] env[62619]: DEBUG oslo_vmware.api [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778228, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.155915] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1758.156116] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1758.156288] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1758.167646] env[62619]: DEBUG nova.compute.provider_tree [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1758.185811] env[62619]: DEBUG nova.compute.manager [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1758.189819] env[62619]: DEBUG nova.compute.manager [req-bf3ca395-be9c-4737-8f4b-a4aa5193c4af req-3ab05db8-675e-4bf2-8d7a-826a27bffc54 service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Received event network-changed-89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1758.189819] env[62619]: DEBUG nova.compute.manager [req-bf3ca395-be9c-4737-8f4b-a4aa5193c4af req-3ab05db8-675e-4bf2-8d7a-826a27bffc54 service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Refreshing instance network info cache due to event network-changed-89e91bb9-2bd5-4385-b3dd-cee4612bb166. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1758.189819] env[62619]: DEBUG oslo_concurrency.lockutils [req-bf3ca395-be9c-4737-8f4b-a4aa5193c4af req-3ab05db8-675e-4bf2-8d7a-826a27bffc54 service nova] Acquiring lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1758.189819] env[62619]: DEBUG oslo_concurrency.lockutils [req-bf3ca395-be9c-4737-8f4b-a4aa5193c4af req-3ab05db8-675e-4bf2-8d7a-826a27bffc54 service nova] Acquired lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1758.189819] env[62619]: DEBUG nova.network.neutron [req-bf3ca395-be9c-4737-8f4b-a4aa5193c4af req-3ab05db8-675e-4bf2-8d7a-826a27bffc54 service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Refreshing network info cache for port 89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1758.191054] env[62619]: INFO nova.scheduler.client.report [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleted allocations for instance 5cf7ca57-351f-48ab-8758-b30f50cd607f [ 1758.218381] env[62619]: DEBUG nova.network.neutron [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1758.277756] env[62619]: DEBUG oslo_vmware.api [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778226, 'name': PowerOnVM_Task, 'duration_secs': 0.50179} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.278084] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1758.281177] env[62619]: DEBUG nova.compute.manager [None req-fdb9dbc4-5b4b-46c9-81d2-cb8eba93b00a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1758.282027] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8737c54-bc6a-4fe5-b8ad-494396eedc19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.443618] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c03af0a-1c4e-4a7f-a5fd-5acb8a44ee2d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.470237] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a80159-4d79-4467-8efe-3215b9f904c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.477173] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating instance '7cb51b51-514d-4223-a82a-5cdbdab9482a' progress to 83 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1758.526970] env[62619]: DEBUG nova.network.neutron [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Updating instance_info_cache with network_info: [{"id": "e6fbdc52-5c2d-4d4f-9c92-77d76129374f", "address": "fa:16:3e:39:18:68", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6fbdc52-5c", "ovs_interfaceid": "e6fbdc52-5c2d-4d4f-9c92-77d76129374f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.642284] env[62619]: DEBUG oslo_vmware.api [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189282} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.645289] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1758.645504] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1758.645747] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1758.645860] env[62619]: INFO nova.compute.manager [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Took 1.76 seconds to destroy the instance on the hypervisor. [ 1758.646268] env[62619]: DEBUG oslo.service.loopingcall [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1758.646441] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778229, 'name': ReconfigVM_Task, 'duration_secs': 0.451253} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.646569] env[62619]: DEBUG nova.compute.manager [-] [instance: b84dd91a-8e08-4476-9683-655357d18370] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1758.646638] env[62619]: DEBUG nova.network.neutron [-] [instance: b84dd91a-8e08-4476-9683-655357d18370] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1758.648456] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85/4cd6dafd-4f19-4d0f-8e07-8171a6a71e85.vmdk or device None with type streamOptimized {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1758.649127] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-53d9558e-bc10-40d8-87ca-49d8a117074d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.657105] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1758.657105] env[62619]: value = "task-1778230" [ 1758.657105] env[62619]: _type = "Task" [ 1758.657105] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.667470] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778230, 'name': Rename_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.673019] env[62619]: DEBUG nova.scheduler.client.report [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1758.695460] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.700412] env[62619]: DEBUG nova.network.neutron [req-c20e603f-db8d-4f0b-8fad-eb2e1653e510 req-db67af0b-b042-45de-8fd6-4d2f7411d5fe service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Updated VIF entry in instance network info cache for port 78e09ad3-db58-4374-9332-13a1881ffdfd. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1758.700412] env[62619]: DEBUG nova.network.neutron [req-c20e603f-db8d-4f0b-8fad-eb2e1653e510 req-db67af0b-b042-45de-8fd6-4d2f7411d5fe service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Updating instance_info_cache with network_info: [{"id": "78e09ad3-db58-4374-9332-13a1881ffdfd", "address": "fa:16:3e:06:ad:e4", "network": {"id": "014f0330-ae80-41d1-8155-a6a0fbf47197", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1852196471-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "079ddd8f5dc14fa699b4961995733f95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78e09ad3-db", "ovs_interfaceid": "78e09ad3-db58-4374-9332-13a1881ffdfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.990385] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1758.990385] 
env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-474491ae-7575-40e7-9a0a-ddb71115092b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.996790] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1758.996790] env[62619]: value = "task-1778231" [ 1758.996790] env[62619]: _type = "Task" [ 1758.996790] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.005740] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778231, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.029285] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "refresh_cache-88f79718-97d0-432c-8515-b60ab3dfd7e0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1759.029616] env[62619]: DEBUG nova.compute.manager [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Instance network_info: |[{"id": "e6fbdc52-5c2d-4d4f-9c92-77d76129374f", "address": "fa:16:3e:39:18:68", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6fbdc52-5c", "ovs_interfaceid": "e6fbdc52-5c2d-4d4f-9c92-77d76129374f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1759.030031] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:18:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6fbdc52-5c2d-4d4f-9c92-77d76129374f', 
'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1759.038190] env[62619]: DEBUG oslo.service.loopingcall [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1759.038930] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1759.039117] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-773712b7-5791-460d-afbb-336c3e6c72fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.063397] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1759.063397] env[62619]: value = "task-1778232" [ 1759.063397] env[62619]: _type = "Task" [ 1759.063397] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.072460] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778232, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.168303] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778230, 'name': Rename_Task, 'duration_secs': 0.308717} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.168639] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1759.169096] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76a5f48a-0339-4619-9a4f-8273d8ee5fc3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.174948] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.006s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.178583] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.483s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.178789] env[62619]: DEBUG nova.objects.instance [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lazy-loading 'resources' on Instance uuid 5cf7ca57-351f-48ab-8758-b30f50cd607f {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1759.180901] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1759.180901] env[62619]: value = "task-1778233" [ 1759.180901] env[62619]: _type = "Task" [ 1759.180901] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.188849] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778233, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.201408] env[62619]: DEBUG nova.compute.manager [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1759.204848] env[62619]: INFO nova.scheduler.client.report [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleted allocations for instance e34a8173-952b-4ddc-90cf-3681387733fa [ 1759.205961] env[62619]: DEBUG oslo_concurrency.lockutils [req-c20e603f-db8d-4f0b-8fad-eb2e1653e510 req-db67af0b-b042-45de-8fd6-4d2f7411d5fe service nova] Releasing lock "refresh_cache-b84dd91a-8e08-4476-9683-655357d18370" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1759.216822] env[62619]: DEBUG nova.network.neutron [req-bf3ca395-be9c-4737-8f4b-a4aa5193c4af req-3ab05db8-675e-4bf2-8d7a-826a27bffc54 service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Updated VIF entry in instance network info cache for port 89e91bb9-2bd5-4385-b3dd-cee4612bb166. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1759.218030] env[62619]: DEBUG nova.network.neutron [req-bf3ca395-be9c-4737-8f4b-a4aa5193c4af req-3ab05db8-675e-4bf2-8d7a-826a27bffc54 service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Updating instance_info_cache with network_info: [{"id": "89e91bb9-2bd5-4385-b3dd-cee4612bb166", "address": "fa:16:3e:78:49:c7", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap89e91bb9-2b", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1759.239638] env[62619]: DEBUG nova.virt.hardware [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1759.239805] env[62619]: DEBUG 
nova.virt.hardware [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1759.239981] env[62619]: DEBUG nova.virt.hardware [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1759.240277] env[62619]: DEBUG nova.virt.hardware [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1759.240456] env[62619]: DEBUG nova.virt.hardware [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1759.240720] env[62619]: DEBUG nova.virt.hardware [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1759.241108] env[62619]: DEBUG nova.virt.hardware [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1759.241376] env[62619]: DEBUG nova.virt.hardware [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1759.241648] env[62619]: DEBUG nova.virt.hardware [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1759.241973] env[62619]: DEBUG nova.virt.hardware [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1759.242294] env[62619]: DEBUG nova.virt.hardware [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1759.243767] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251f41c4-28d0-4a55-8bf9-c3412a8cce46 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.259100] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3064ba93-905a-461e-9cc2-7e0740109428 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.280366] env[62619]: INFO nova.compute.manager [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Rebuilding instance [ 1759.324279] env[62619]: DEBUG nova.compute.manager [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1759.325393] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f643995-fdc1-4949-978f-58f323684791 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.488931] env[62619]: DEBUG nova.network.neutron [-] [instance: b84dd91a-8e08-4476-9683-655357d18370] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1759.510610] env[62619]: DEBUG oslo_vmware.api [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778231, 'name': PowerOnVM_Task, 'duration_secs': 0.437292} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.511095] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1759.511443] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-29dde0f9-0793-443c-9b4a-8275dee18437 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating instance '7cb51b51-514d-4223-a82a-5cdbdab9482a' progress to 100 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1759.579059] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778232, 'name': CreateVM_Task, 'duration_secs': 0.450592} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.579059] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1759.579059] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1759.579059] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1759.579393] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1759.579645] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02787dea-b930-419b-bcd1-cc9bd84005c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.586015] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1759.586015] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52217c63-5e15-450d-2f4a-b78fa53b6b74" [ 1759.586015] env[62619]: _type = "Task" [ 1759.586015] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.594670] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52217c63-5e15-450d-2f4a-b78fa53b6b74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.691402] env[62619]: DEBUG nova.objects.instance [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lazy-loading 'numa_topology' on Instance uuid 5cf7ca57-351f-48ab-8758-b30f50cd607f {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1759.698807] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778233, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.709218] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1759.714146] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d597e57c-595c-4970-9ae9-f64586f34aae tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "e34a8173-952b-4ddc-90cf-3681387733fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.508s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.720299] env[62619]: DEBUG oslo_concurrency.lockutils [req-bf3ca395-be9c-4737-8f4b-a4aa5193c4af req-3ab05db8-675e-4bf2-8d7a-826a27bffc54 service nova] Releasing lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1759.837894] env[62619]: DEBUG nova.compute.manager [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Received event network-changed-e6fbdc52-5c2d-4d4f-9c92-77d76129374f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1759.838293] env[62619]: DEBUG nova.compute.manager [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Refreshing instance network info cache due to event network-changed-e6fbdc52-5c2d-4d4f-9c92-77d76129374f. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1759.838480] env[62619]: DEBUG oslo_concurrency.lockutils [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] Acquiring lock "refresh_cache-88f79718-97d0-432c-8515-b60ab3dfd7e0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1759.838614] env[62619]: DEBUG oslo_concurrency.lockutils [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] Acquired lock "refresh_cache-88f79718-97d0-432c-8515-b60ab3dfd7e0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1759.838781] env[62619]: DEBUG nova.network.neutron [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Refreshing network info cache for port e6fbdc52-5c2d-4d4f-9c92-77d76129374f {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1759.916492] env[62619]: DEBUG nova.network.neutron [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Successfully updated port: 0a3f2ddf-2344-4713-a719-025a5945f591 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1759.990494] env[62619]: INFO nova.compute.manager [-] [instance: b84dd91a-8e08-4476-9683-655357d18370] Took 1.34 seconds to deallocate network for instance. 
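Note: the surrounding entries repeat a single pattern: a vSphere task (CreateVM_Task, PowerOnVM_Task, Rename_Task, SearchDatastore_Task, ...) is submitted, then polled (wait_for_task / _poll_task, "progress is N%") until it logs "completed successfully" with a duration_secs. The following is a minimal, self-contained sketch of that poll-until-done loop. It is illustrative only and assumes hypothetical names (wait_for_task, get_task_info, TaskFailed, and the dict shape returned by the poller); it is not the oslo.vmware implementation used by the service above.

import time

class TaskFailed(Exception):
    """Raised when the polled task reports an error or times out (placeholder)."""

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll get_task_info() until the task finishes (illustrative sketch).

    get_task_info is assumed to return a dict such as
    {'state': 'running', 'progress': 42} while the task runs, and finally
    {'state': 'success', 'duration_secs': 0.45} or
    {'state': 'error', 'message': '...'}.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info()
        state = info.get('state')
        if state == 'success':
            # Corresponds to the "... completed successfully." entries above.
            return info
        if state == 'error':
            raise TaskFailed(info.get('message', 'task failed'))
        if time.monotonic() > deadline:
            raise TaskFailed('timed out waiting for task')
        # Corresponds to the repeated "progress is N%" debug entries above.
        print("progress is %s%%" % info.get('progress', 0))
        time.sleep(poll_interval)

if __name__ == '__main__':
    # Usage example with a fake task that completes after three polls.
    polls = iter([{'state': 'running', 'progress': 0},
                  {'state': 'running', 'progress': 89},
                  {'state': 'success', 'duration_secs': 0.45}])
    print(wait_for_task(lambda: next(polls), poll_interval=0.01))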
[ 1760.050031] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "interface-4983b333-debb-4a2b-b28d-b321f0d8d7d7-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.050305] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-4983b333-debb-4a2b-b28d-b321f0d8d7d7-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.051022] env[62619]: DEBUG nova.objects.instance [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'flavor' on Instance uuid 4983b333-debb-4a2b-b28d-b321f0d8d7d7 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1760.098964] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52217c63-5e15-450d-2f4a-b78fa53b6b74, 'name': SearchDatastore_Task, 'duration_secs': 0.035436} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.099310] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.099561] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1760.099835] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.099987] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.100190] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 
tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1760.100456] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-092b261d-f94d-4057-87c0-bd3ef523183f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.110235] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1760.110425] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1760.111159] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff79eddf-b091-4b96-8d3b-df6dbd6d3dc9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.116727] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1760.116727] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52494cc0-463b-007c-47a3-4352d97d2283" [ 1760.116727] env[62619]: _type = "Task" [ 1760.116727] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.125323] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52494cc0-463b-007c-47a3-4352d97d2283, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.191915] env[62619]: DEBUG oslo_vmware.api [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778233, 'name': PowerOnVM_Task, 'duration_secs': 0.694823} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.192205] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1760.195419] env[62619]: DEBUG nova.objects.base [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Object Instance<5cf7ca57-351f-48ab-8758-b30f50cd607f> lazy-loaded attributes: resources,numa_topology {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1760.221160] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1760.221160] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1760.221160] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1760.221160] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1760.221160] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1760.292334] env[62619]: DEBUG nova.compute.manager [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1760.293341] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0fc281-3278-47bd-934c-d07b727a916f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.347184] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1760.348198] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8098a75-168c-4132-9788-d4f4b1df702f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.357066] env[62619]: DEBUG oslo_vmware.api [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Waiting for the task: (returnval){ [ 1760.357066] env[62619]: value = "task-1778234" [ 1760.357066] env[62619]: _type = "Task" [ 1760.357066] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.366498] env[62619]: DEBUG oslo_vmware.api [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778234, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.388746] env[62619]: INFO nova.compute.manager [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Unrescuing [ 1760.389034] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "refresh_cache-8745aa7f-9848-4320-94b5-08b7e3bccf80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.389203] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "refresh_cache-8745aa7f-9848-4320-94b5-08b7e3bccf80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.389374] env[62619]: DEBUG nova.network.neutron [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1760.418806] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Acquiring lock "refresh_cache-e19650da-cc3d-4350-be3e-dc776ce68206" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.418951] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Acquired lock "refresh_cache-e19650da-cc3d-4350-be3e-dc776ce68206" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.419166] env[62619]: DEBUG nova.network.neutron [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1760.443818] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdab7507-5e65-4a59-922d-260e591eb4e2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.455440] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4d50fa-dd2b-4523-aca0-6c3c267a15f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.492340] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae0885b-e183-4f4d-a736-9e80d4a02edb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.496593] env[62619]: DEBUG oslo_concurrency.lockutils 
[None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.504142] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6a885a-abef-4f6d-b29d-67fa836005f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.525881] env[62619]: DEBUG nova.compute.provider_tree [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1760.614034] env[62619]: DEBUG nova.network.neutron [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Updated VIF entry in instance network info cache for port e6fbdc52-5c2d-4d4f-9c92-77d76129374f. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1760.614407] env[62619]: DEBUG nova.network.neutron [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Updating instance_info_cache with network_info: [{"id": "e6fbdc52-5c2d-4d4f-9c92-77d76129374f", "address": "fa:16:3e:39:18:68", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6fbdc52-5c", "ovs_interfaceid": "e6fbdc52-5c2d-4d4f-9c92-77d76129374f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.626606] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': 
session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52494cc0-463b-007c-47a3-4352d97d2283, 'name': SearchDatastore_Task, 'duration_secs': 0.010307} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.628053] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7f4800b-6080-4510-ad78-6036b0b1b23e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.634630] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1760.634630] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c7e8f7-e930-962b-1646-f04186e104bf" [ 1760.634630] env[62619]: _type = "Task" [ 1760.634630] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.644218] env[62619]: DEBUG oslo_concurrency.lockutils [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "b84dd91a-8e08-4476-9683-655357d18370" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.644636] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c7e8f7-e930-962b-1646-f04186e104bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.667721] env[62619]: DEBUG nova.objects.instance [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'pci_requests' on Instance uuid 4983b333-debb-4a2b-b28d-b321f0d8d7d7 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1760.723715] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.813912] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e63aba1c-3b47-40c0-a8d9-617168cbc2a0 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 33.470s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.868439] env[62619]: DEBUG oslo_vmware.api [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778234, 'name': PowerOffVM_Task, 'duration_secs': 0.217385} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.868714] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1760.869471] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1760.869723] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb888461-fedd-4214-9a5e-bec545db4950 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.877629] env[62619]: DEBUG oslo_vmware.api [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Waiting for the task: (returnval){ [ 1760.877629] env[62619]: value = "task-1778235" [ 1760.877629] env[62619]: _type = "Task" [ 1760.877629] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.888210] env[62619]: DEBUG oslo_vmware.api [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778235, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.958205] env[62619]: DEBUG nova.network.neutron [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1760.982022] env[62619]: DEBUG nova.compute.manager [req-65d98229-4ad8-420e-a54c-2665315138f3 req-820fca61-28eb-4fa9-be8a-88903fde4cd7 service nova] [instance: b84dd91a-8e08-4476-9683-655357d18370] Received event network-vif-deleted-78e09ad3-db58-4374-9332-13a1881ffdfd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1760.982227] env[62619]: DEBUG nova.compute.manager [req-65d98229-4ad8-420e-a54c-2665315138f3 req-820fca61-28eb-4fa9-be8a-88903fde4cd7 service nova] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Received event network-vif-plugged-0a3f2ddf-2344-4713-a719-025a5945f591 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1760.982421] env[62619]: DEBUG oslo_concurrency.lockutils [req-65d98229-4ad8-420e-a54c-2665315138f3 req-820fca61-28eb-4fa9-be8a-88903fde4cd7 service nova] Acquiring lock "e19650da-cc3d-4350-be3e-dc776ce68206-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.982624] env[62619]: DEBUG oslo_concurrency.lockutils [req-65d98229-4ad8-420e-a54c-2665315138f3 req-820fca61-28eb-4fa9-be8a-88903fde4cd7 service nova] Lock "e19650da-cc3d-4350-be3e-dc776ce68206-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.982784] env[62619]: DEBUG oslo_concurrency.lockutils [req-65d98229-4ad8-420e-a54c-2665315138f3 req-820fca61-28eb-4fa9-be8a-88903fde4cd7 service nova] Lock "e19650da-cc3d-4350-be3e-dc776ce68206-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.982944] env[62619]: DEBUG nova.compute.manager [req-65d98229-4ad8-420e-a54c-2665315138f3 req-820fca61-28eb-4fa9-be8a-88903fde4cd7 service nova] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] No waiting events found dispatching network-vif-plugged-0a3f2ddf-2344-4713-a719-025a5945f591 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1760.983253] env[62619]: WARNING nova.compute.manager [req-65d98229-4ad8-420e-a54c-2665315138f3 req-820fca61-28eb-4fa9-be8a-88903fde4cd7 service nova] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Received unexpected event network-vif-plugged-0a3f2ddf-2344-4713-a719-025a5945f591 for instance with vm_state building and task_state spawning. [ 1760.983429] env[62619]: DEBUG nova.compute.manager [req-65d98229-4ad8-420e-a54c-2665315138f3 req-820fca61-28eb-4fa9-be8a-88903fde4cd7 service nova] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Received event network-changed-0a3f2ddf-2344-4713-a719-025a5945f591 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1760.983579] env[62619]: DEBUG nova.compute.manager [req-65d98229-4ad8-420e-a54c-2665315138f3 req-820fca61-28eb-4fa9-be8a-88903fde4cd7 service nova] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Refreshing instance network info cache due to event network-changed-0a3f2ddf-2344-4713-a719-025a5945f591. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1760.983770] env[62619]: DEBUG oslo_concurrency.lockutils [req-65d98229-4ad8-420e-a54c-2665315138f3 req-820fca61-28eb-4fa9-be8a-88903fde4cd7 service nova] Acquiring lock "refresh_cache-e19650da-cc3d-4350-be3e-dc776ce68206" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1761.054863] env[62619]: ERROR nova.scheduler.client.report [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [req-bad5c592-39aa-4223-a681-3a9986dcbd0f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bad5c592-39aa-4223-a681-3a9986dcbd0f"}]} [ 1761.077299] env[62619]: DEBUG nova.scheduler.client.report [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1761.090137] env[62619]: DEBUG nova.scheduler.client.report [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1761.090419] env[62619]: DEBUG nova.compute.provider_tree [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1761.096780] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock 
"848da7a1-9cec-4715-bbe1-ef7a51b3a5c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.096999] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "848da7a1-9cec-4715-bbe1-ef7a51b3a5c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.103674] env[62619]: DEBUG nova.scheduler.client.report [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1761.114834] env[62619]: DEBUG nova.network.neutron [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Updating instance_info_cache with network_info: [{"id": "0a3f2ddf-2344-4713-a719-025a5945f591", "address": "fa:16:3e:bd:5e:3c", "network": {"id": "855d8997-340b-4f59-b336-0554aab9b655", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2077451815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73b01174a314190bcdd93287203adff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a3f2ddf-23", "ovs_interfaceid": "0a3f2ddf-2344-4713-a719-025a5945f591", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1761.120215] env[62619]: DEBUG oslo_concurrency.lockutils [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] Releasing lock "refresh_cache-88f79718-97d0-432c-8515-b60ab3dfd7e0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.120453] env[62619]: DEBUG nova.compute.manager [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Received event network-changed-47138162-63de-413e-8a1d-767355e11190 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1761.120618] env[62619]: DEBUG nova.compute.manager [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] [instance: 
4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Refreshing instance network info cache due to event network-changed-47138162-63de-413e-8a1d-767355e11190. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1761.120819] env[62619]: DEBUG oslo_concurrency.lockutils [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] Acquiring lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1761.120960] env[62619]: DEBUG oslo_concurrency.lockutils [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] Acquired lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1761.121128] env[62619]: DEBUG nova.network.neutron [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Refreshing network info cache for port 47138162-63de-413e-8a1d-767355e11190 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1761.128019] env[62619]: DEBUG nova.scheduler.client.report [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1761.150854] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c7e8f7-e930-962b-1646-f04186e104bf, 'name': SearchDatastore_Task, 'duration_secs': 0.010903} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.155508] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.155773] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 88f79718-97d0-432c-8515-b60ab3dfd7e0/88f79718-97d0-432c-8515-b60ab3dfd7e0.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1761.156575] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4cc3a73b-a005-4292-b86a-0cf631f96d76 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.165516] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1761.165516] env[62619]: value = "task-1778236" [ 1761.165516] env[62619]: _type = "Task" [ 1761.165516] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.171558] env[62619]: DEBUG nova.objects.base [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Object Instance<4983b333-debb-4a2b-b28d-b321f0d8d7d7> lazy-loaded attributes: flavor,pci_requests {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1761.171754] env[62619]: DEBUG nova.network.neutron [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1761.179349] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778236, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.204017] env[62619]: DEBUG nova.network.neutron [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Updating instance_info_cache with network_info: [{"id": "9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01", "address": "fa:16:3e:da:01:a7", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9abe90c7-65", "ovs_interfaceid": "9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1761.218261] env[62619]: DEBUG nova.policy [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8d937f303584c3daea133a6283fd5a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23d77e73a09d492695fbfe6ac2c93371', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1761.383624] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6dfafeb-6368-41f7-adaa-f1e0bebee235 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.396588] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8f4b18-fc23-484d-8058-d9c8106b3f5c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.400188] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1761.400491] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 
tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Volume detach. Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1761.400663] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369095', 'volume_id': 'cd357a5b-17b3-4318-9f18-e8e95fb8d9d9', 'name': 'volume-cd357a5b-17b3-4318-9f18-e8e95fb8d9d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd357a5b-17b3-4318-9f18-e8e95fb8d9d9', 'serial': 'cd357a5b-17b3-4318-9f18-e8e95fb8d9d9'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1761.401976] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b547761-d0ad-490b-87a3-dda8dc5479fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.451647] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3efb19f-7a18-4545-b289-0022618311d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.455400] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26bb685a-bb48-48aa-ae59-81fefc7ae481 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.468023] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0caf13-e791-4b43-bc97-11b529b5e5a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.473276] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b738b562-e59b-4bf7-aef2-c5694d3482f7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.504025] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b057cb97-bd1a-4974-a3c2-fdf3c648f6ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.507254] env[62619]: DEBUG nova.compute.provider_tree [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1761.528057] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] The volume has not been displaced from its original location: [datastore1] volume-cd357a5b-17b3-4318-9f18-e8e95fb8d9d9/volume-cd357a5b-17b3-4318-9f18-e8e95fb8d9d9.vmdk. No consolidation needed. {{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1761.533419] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Reconfiguring VM instance instance-00000053 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1761.534739] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d38dba88-7a3a-4708-b8a9-336666b7431e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.548907] env[62619]: DEBUG nova.network.neutron [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Successfully created port: f1f8853f-2de2-45f7-8853-4591e89cac0b {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1761.560958] env[62619]: DEBUG oslo_vmware.api [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Waiting for the task: (returnval){ [ 1761.560958] env[62619]: value = "task-1778237" [ 1761.560958] env[62619]: _type = "Task" [ 1761.560958] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.573702] env[62619]: DEBUG oslo_vmware.api [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778237, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.602465] env[62619]: DEBUG nova.compute.manager [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1761.620893] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Releasing lock "refresh_cache-e19650da-cc3d-4350-be3e-dc776ce68206" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.621302] env[62619]: DEBUG nova.compute.manager [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Instance network_info: |[{"id": "0a3f2ddf-2344-4713-a719-025a5945f591", "address": "fa:16:3e:bd:5e:3c", "network": {"id": "855d8997-340b-4f59-b336-0554aab9b655", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2077451815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73b01174a314190bcdd93287203adff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a3f2ddf-23", "ovs_interfaceid": "0a3f2ddf-2344-4713-a719-025a5945f591", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1761.621752] env[62619]: DEBUG oslo_concurrency.lockutils [req-65d98229-4ad8-420e-a54c-2665315138f3 req-820fca61-28eb-4fa9-be8a-88903fde4cd7 service nova] Acquired lock "refresh_cache-e19650da-cc3d-4350-be3e-dc776ce68206" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1761.622018] env[62619]: DEBUG nova.network.neutron [req-65d98229-4ad8-420e-a54c-2665315138f3 req-820fca61-28eb-4fa9-be8a-88903fde4cd7 service nova] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Refreshing network info cache for port 0a3f2ddf-2344-4713-a719-025a5945f591 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1761.623332] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:5e:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '193994c7-8e1b-4f25-a4a4-d0563845eb28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0a3f2ddf-2344-4713-a719-025a5945f591', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1761.631834] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 
tempest-ImagesOneServerTestJSON-617082992-project-member] Creating folder: Project (b73b01174a314190bcdd93287203adff). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1761.634879] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67031e5a-5c2b-406a-93e2-4fba5911928a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.648754] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Created folder: Project (b73b01174a314190bcdd93287203adff) in parent group-v368875. [ 1761.649077] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Creating folder: Instances. Parent ref: group-v369129. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1761.649202] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff33275c-5ef5-4db5-80ed-c817fb080789 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.668437] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Created folder: Instances in parent group-v369129. [ 1761.668437] env[62619]: DEBUG oslo.service.loopingcall [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1761.669990] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1761.670232] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa0299bc-f1da-4153-b04f-aa5c5f1913e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.693924] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778236, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526982} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.695467] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 88f79718-97d0-432c-8515-b60ab3dfd7e0/88f79718-97d0-432c-8515-b60ab3dfd7e0.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1761.695710] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1761.695990] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1761.695990] env[62619]: value = "task-1778240" [ 1761.695990] env[62619]: _type = "Task" [ 1761.695990] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.696336] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44486c6e-1d63-4e15-a14c-f5fd23f16a23 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.707707] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "refresh_cache-8745aa7f-9848-4320-94b5-08b7e3bccf80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.708716] env[62619]: DEBUG nova.objects.instance [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lazy-loading 'flavor' on Instance uuid 8745aa7f-9848-4320-94b5-08b7e3bccf80 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1761.709942] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778240, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.711492] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1761.711492] env[62619]: value = "task-1778241" [ 1761.711492] env[62619]: _type = "Task" [ 1761.711492] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.721497] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778241, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.001902] env[62619]: DEBUG nova.network.neutron [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Updated VIF entry in instance network info cache for port 47138162-63de-413e-8a1d-767355e11190. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1762.002293] env[62619]: DEBUG nova.network.neutron [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Updating instance_info_cache with network_info: [{"id": "47138162-63de-413e-8a1d-767355e11190", "address": "fa:16:3e:02:cd:09", "network": {"id": "014f0330-ae80-41d1-8155-a6a0fbf47197", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1852196471-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "079ddd8f5dc14fa699b4961995733f95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47138162-63", "ovs_interfaceid": "47138162-63de-413e-8a1d-767355e11190", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.039873] env[62619]: ERROR nova.scheduler.client.report [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [req-c689b769-100e-4781-84d6-4bfdc3f190fc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c689b769-100e-4781-84d6-4bfdc3f190fc"}]} [ 1762.057391] env[62619]: DEBUG nova.scheduler.client.report [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1762.072219] env[62619]: DEBUG oslo_vmware.api [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778237, 'name': ReconfigVM_Task, 'duration_secs': 0.322204} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.072537] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Reconfigured VM instance instance-00000053 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1762.079366] env[62619]: DEBUG nova.scheduler.client.report [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1762.079671] env[62619]: DEBUG nova.compute.provider_tree [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1762.082045] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8dc20363-efd2-405d-8207-ec0d47d5ac67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.100929] env[62619]: DEBUG oslo_vmware.api [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Waiting for the task: (returnval){ [ 1762.100929] env[62619]: value = 
"task-1778242" [ 1762.100929] env[62619]: _type = "Task" [ 1762.100929] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.116919] env[62619]: DEBUG nova.scheduler.client.report [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1762.119046] env[62619]: DEBUG oslo_vmware.api [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778242, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.134695] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.148253] env[62619]: DEBUG nova.scheduler.client.report [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1762.210274] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778240, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.222024] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d938ef71-8614-4849-b322-5f8d9f9787ca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.229069] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778241, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086227} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.233114] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1762.234037] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c129bf-d0fb-4519-b85b-ed9db676b568 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.261040] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1762.265082] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e96ead6-c713-4daa-b24b-0758693c375c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.285772] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 88f79718-97d0-432c-8515-b60ab3dfd7e0/88f79718-97d0-432c-8515-b60ab3dfd7e0.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1762.289949] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aed598a6-f562-4b84-8c4e-071ae164db03 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.305480] env[62619]: DEBUG oslo_vmware.api [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1762.305480] env[62619]: value = "task-1778243" [ 1762.305480] env[62619]: _type = "Task" [ 1762.305480] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.322620] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1762.322620] env[62619]: value = "task-1778244" [ 1762.322620] env[62619]: _type = "Task" [ 1762.322620] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.324546] env[62619]: DEBUG oslo_vmware.api [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778243, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.336327] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778244, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.393965] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "7cb51b51-514d-4223-a82a-5cdbdab9482a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.394324] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "7cb51b51-514d-4223-a82a-5cdbdab9482a" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.394676] env[62619]: DEBUG nova.compute.manager [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Going to confirm migration 5 {{(pid=62619) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5157}} [ 1762.505620] env[62619]: DEBUG oslo_concurrency.lockutils [req-8e5a284a-6522-4f05-a751-434f90066765 req-32b905db-2428-4314-bb26-33c4d3b60815 service nova] Releasing lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1762.518426] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13f5f6e-ef95-4b08-81ab-445b110b63b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.529204] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c037cf-954c-4c05-88ce-26c24ab089f8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.533703] env[62619]: DEBUG nova.network.neutron [req-65d98229-4ad8-420e-a54c-2665315138f3 req-820fca61-28eb-4fa9-be8a-88903fde4cd7 service nova] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Updated VIF entry in instance network info cache for port 0a3f2ddf-2344-4713-a719-025a5945f591. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1762.534071] env[62619]: DEBUG nova.network.neutron [req-65d98229-4ad8-420e-a54c-2665315138f3 req-820fca61-28eb-4fa9-be8a-88903fde4cd7 service nova] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Updating instance_info_cache with network_info: [{"id": "0a3f2ddf-2344-4713-a719-025a5945f591", "address": "fa:16:3e:bd:5e:3c", "network": {"id": "855d8997-340b-4f59-b336-0554aab9b655", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2077451815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b73b01174a314190bcdd93287203adff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a3f2ddf-23", "ovs_interfaceid": "0a3f2ddf-2344-4713-a719-025a5945f591", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.567773] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6368d4-69cd-4839-9442-74ae5ac3309a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.571542] env[62619]: DEBUG nova.compute.manager [req-df6a5220-bb55-4b4d-9fa0-00a3454ab365 req-15481d3b-652f-416e-b4a5-b05b6d6c2a0e service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Received event network-changed-47138162-63de-413e-8a1d-767355e11190 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1762.571735] env[62619]: DEBUG nova.compute.manager [req-df6a5220-bb55-4b4d-9fa0-00a3454ab365 req-15481d3b-652f-416e-b4a5-b05b6d6c2a0e service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Refreshing instance network info cache due to event network-changed-47138162-63de-413e-8a1d-767355e11190. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1762.571942] env[62619]: DEBUG oslo_concurrency.lockutils [req-df6a5220-bb55-4b4d-9fa0-00a3454ab365 req-15481d3b-652f-416e-b4a5-b05b6d6c2a0e service nova] Acquiring lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1762.572105] env[62619]: DEBUG oslo_concurrency.lockutils [req-df6a5220-bb55-4b4d-9fa0-00a3454ab365 req-15481d3b-652f-416e-b4a5-b05b6d6c2a0e service nova] Acquired lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1762.572260] env[62619]: DEBUG nova.network.neutron [req-df6a5220-bb55-4b4d-9fa0-00a3454ab365 req-15481d3b-652f-416e-b4a5-b05b6d6c2a0e service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Refreshing network info cache for port 47138162-63de-413e-8a1d-767355e11190 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1762.580646] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf45a84-caa9-49a9-a734-241574be6229 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.598782] env[62619]: DEBUG nova.compute.provider_tree [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1762.611639] env[62619]: DEBUG oslo_vmware.api [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778242, 'name': ReconfigVM_Task, 'duration_secs': 0.30072} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.611983] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369095', 'volume_id': 'cd357a5b-17b3-4318-9f18-e8e95fb8d9d9', 'name': 'volume-cd357a5b-17b3-4318-9f18-e8e95fb8d9d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd357a5b-17b3-4318-9f18-e8e95fb8d9d9', 'serial': 'cd357a5b-17b3-4318-9f18-e8e95fb8d9d9'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1762.612258] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1762.613093] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e3e2c3-8b58-42d6-822a-c623a5271969 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.621594] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1762.621858] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e841963-2160-4a20-865c-223401e36792 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.628441] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.709131] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778240, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.722475] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1762.722704] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1762.722888] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Deleting the datastore file [datastore1] f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1762.723175] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3c6c7c0-898b-44d0-9a70-9315a17b66a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.730604] env[62619]: DEBUG oslo_vmware.api [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Waiting for the task: (returnval){ [ 1762.730604] env[62619]: value = "task-1778246" [ 1762.730604] env[62619]: _type = "Task" [ 1762.730604] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.739124] env[62619]: DEBUG oslo_vmware.api [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778246, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.819272] env[62619]: DEBUG oslo_vmware.api [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778243, 'name': PowerOffVM_Task, 'duration_secs': 0.307013} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.819558] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1762.824956] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Reconfiguring VM instance instance-00000057 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1762.825360] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d507fdf-504e-4890-96d5-fc88c23fee6b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.847482] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778244, 'name': ReconfigVM_Task, 'duration_secs': 0.397547} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.848906] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 88f79718-97d0-432c-8515-b60ab3dfd7e0/88f79718-97d0-432c-8515-b60ab3dfd7e0.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1762.849616] env[62619]: DEBUG oslo_vmware.api [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1762.849616] env[62619]: value = "task-1778247" [ 1762.849616] env[62619]: _type = "Task" [ 1762.849616] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.849800] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f2c5fa5-8590-4578-9e5c-6ea9c0a57a35 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.858469] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1762.858469] env[62619]: value = "task-1778248" [ 1762.858469] env[62619]: _type = "Task" [ 1762.858469] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.861831] env[62619]: DEBUG oslo_vmware.api [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778247, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.870641] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778248, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.023122] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "refresh_cache-7cb51b51-514d-4223-a82a-5cdbdab9482a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.023378] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "refresh_cache-7cb51b51-514d-4223-a82a-5cdbdab9482a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.023483] env[62619]: DEBUG nova.network.neutron [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1763.023688] env[62619]: DEBUG nova.objects.instance [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lazy-loading 'info_cache' on Instance uuid 7cb51b51-514d-4223-a82a-5cdbdab9482a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1763.037643] env[62619]: DEBUG oslo_concurrency.lockutils [req-65d98229-4ad8-420e-a54c-2665315138f3 req-820fca61-28eb-4fa9-be8a-88903fde4cd7 service nova] Releasing lock "refresh_cache-e19650da-cc3d-4350-be3e-dc776ce68206" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1763.105018] env[62619]: DEBUG nova.scheduler.client.report [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1763.210543] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778240, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.244988] env[62619]: DEBUG oslo_vmware.api [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Task: {'id': task-1778246, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077789} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.245890] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1763.245890] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1763.246105] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1763.293845] env[62619]: DEBUG nova.compute.manager [req-e78d749b-0c3e-4c25-af34-caa11c85ac16 req-0b9249c9-c8e0-4226-af1b-364e5569ed0b service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Received event network-vif-plugged-f1f8853f-2de2-45f7-8853-4591e89cac0b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1763.294281] env[62619]: DEBUG oslo_concurrency.lockutils [req-e78d749b-0c3e-4c25-af34-caa11c85ac16 req-0b9249c9-c8e0-4226-af1b-364e5569ed0b service nova] Acquiring lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.294649] env[62619]: DEBUG oslo_concurrency.lockutils [req-e78d749b-0c3e-4c25-af34-caa11c85ac16 req-0b9249c9-c8e0-4226-af1b-364e5569ed0b service nova] Lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.294944] env[62619]: DEBUG oslo_concurrency.lockutils [req-e78d749b-0c3e-4c25-af34-caa11c85ac16 req-0b9249c9-c8e0-4226-af1b-364e5569ed0b service nova] Lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.295231] env[62619]: DEBUG nova.compute.manager [req-e78d749b-0c3e-4c25-af34-caa11c85ac16 req-0b9249c9-c8e0-4226-af1b-364e5569ed0b service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] No waiting events found dispatching network-vif-plugged-f1f8853f-2de2-45f7-8853-4591e89cac0b {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1763.295510] env[62619]: 
WARNING nova.compute.manager [req-e78d749b-0c3e-4c25-af34-caa11c85ac16 req-0b9249c9-c8e0-4226-af1b-364e5569ed0b service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Received unexpected event network-vif-plugged-f1f8853f-2de2-45f7-8853-4591e89cac0b for instance with vm_state active and task_state None. [ 1763.306896] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Volume detach. Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1763.306896] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a6015992-c1a8-4ea5-b2f3-5167e713b839 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.319173] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0628287a-5cba-4c66-b8e1-42a408403aaa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.355105] env[62619]: ERROR nova.compute.manager [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Failed to detach volume cd357a5b-17b3-4318-9f18-e8e95fb8d9d9 from /dev/sda: nova.exception.InstanceNotFound: Instance f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e could not be found. [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Traceback (most recent call last): [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] self.driver.rebuild(**kwargs) [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] raise NotImplementedError() [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] NotImplementedError [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] During handling of the above exception, another exception occurred: [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Traceback (most recent call last): [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] self.driver.detach_volume(context, old_connection_info, [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 
561, in detach_volume [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] return self._volumeops.detach_volume(connection_info, instance) [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] self._detach_volume_vmdk(connection_info, instance) [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] stable_ref.fetch_moref(session) [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] nova.exception.InstanceNotFound: Instance f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e could not be found. [ 1763.355105] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] [ 1763.369886] env[62619]: DEBUG nova.network.neutron [req-df6a5220-bb55-4b4d-9fa0-00a3454ab365 req-15481d3b-652f-416e-b4a5-b05b6d6c2a0e service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Updated VIF entry in instance network info cache for port 47138162-63de-413e-8a1d-767355e11190. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1763.370241] env[62619]: DEBUG nova.network.neutron [req-df6a5220-bb55-4b4d-9fa0-00a3454ab365 req-15481d3b-652f-416e-b4a5-b05b6d6c2a0e service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Updating instance_info_cache with network_info: [{"id": "47138162-63de-413e-8a1d-767355e11190", "address": "fa:16:3e:02:cd:09", "network": {"id": "014f0330-ae80-41d1-8155-a6a0fbf47197", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1852196471-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "079ddd8f5dc14fa699b4961995733f95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "678ebbe4-4c53-4eaf-a689-93981310f37d", "external-id": "nsx-vlan-transportzone-443", "segmentation_id": 443, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47138162-63", "ovs_interfaceid": "47138162-63de-413e-8a1d-767355e11190", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1763.374160] env[62619]: DEBUG oslo_vmware.api [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778247, 'name': ReconfigVM_Task, 'duration_secs': 0.241487} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.375059] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Reconfigured VM instance instance-00000057 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1763.375284] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1763.375543] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5ae787c-2940-46ed-8320-4f8c0e42d1f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.379850] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778248, 'name': Rename_Task, 'duration_secs': 0.157215} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.380473] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1763.380725] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-320aa83b-a47f-48a4-b5c5-912a7a4d2cf7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.385135] env[62619]: DEBUG oslo_vmware.api [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1763.385135] env[62619]: value = "task-1778249" [ 1763.385135] env[62619]: _type = "Task" [ 1763.385135] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.389775] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1763.389775] env[62619]: value = "task-1778250" [ 1763.389775] env[62619]: _type = "Task" [ 1763.389775] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.395792] env[62619]: DEBUG oslo_vmware.api [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778249, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.404721] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778250, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.510507] env[62619]: DEBUG nova.compute.utils [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Build of instance f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e aborted: Failed to rebuild volume backed instance. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1763.515316] env[62619]: ERROR nova.compute.manager [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e aborted: Failed to rebuild volume backed instance. 
[ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Traceback (most recent call last): [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] self.driver.rebuild(**kwargs) [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] raise NotImplementedError() [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] NotImplementedError [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] During handling of the above exception, another exception occurred: [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Traceback (most recent call last): [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] self._detach_root_volume(context, instance, root_bdm) [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] with excutils.save_and_reraise_exception(): [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] self.force_reraise() [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] raise self.value [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] self.driver.detach_volume(context, old_connection_info, [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] return self._volumeops.detach_volume(connection_info, instance) [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] self._detach_volume_vmdk(connection_info, instance) [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] stable_ref.fetch_moref(session) [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] nova.exception.InstanceNotFound: Instance f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e could not be found. [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] During handling of the above exception, another exception occurred: [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Traceback (most recent call last): [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/compute/manager.py", line 11246, in _error_out_instance_on_exception [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] yield [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1763.515316] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] self._do_rebuild_instance_with_claim( [ 1763.516844] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1763.516844] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] self._do_rebuild_instance( [ 1763.516844] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1763.516844] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] self._rebuild_default_impl(**kwargs) [ 1763.516844] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1763.516844] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] 
self._rebuild_volume_backed_instance( [ 1763.516844] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1763.516844] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] raise exception.BuildAbortException( [ 1763.516844] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] nova.exception.BuildAbortException: Build of instance f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e aborted: Failed to rebuild volume backed instance. [ 1763.516844] env[62619]: ERROR nova.compute.manager [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] [ 1763.607008] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.428s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.609786] env[62619]: DEBUG oslo_concurrency.lockutils [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.113s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.610098] env[62619]: DEBUG nova.objects.instance [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lazy-loading 'resources' on Instance uuid b84dd91a-8e08-4476-9683-655357d18370 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1763.708493] env[62619]: DEBUG nova.network.neutron [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Successfully updated port: f1f8853f-2de2-45f7-8853-4591e89cac0b {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1763.712457] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778240, 'name': CreateVM_Task, 'duration_secs': 1.64146} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.712922] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1763.713671] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.713902] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.714285] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1763.714746] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76100d78-ab42-42f7-b8dd-fa4b8be41062 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.721746] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1763.721746] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5280cbf7-86c2-e430-49d7-44b5d29c3cbe" [ 1763.721746] env[62619]: _type = "Task" [ 1763.721746] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.734860] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5280cbf7-86c2-e430-49d7-44b5d29c3cbe, 'name': SearchDatastore_Task, 'duration_secs': 0.010454} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.734860] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1763.735039] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1763.735990] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.735990] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.735990] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1763.736260] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89424896-4ce6-437a-8d9b-9074b2a436d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.745277] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1763.745471] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1763.746213] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad8df47c-8616-4f81-935f-dcb4dd68fb19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.752651] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1763.752651] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52726643-10dd-3164-d2e9-5045f4894820" [ 1763.752651] env[62619]: _type = "Task" [ 1763.752651] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.760793] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52726643-10dd-3164-d2e9-5045f4894820, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.798690] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.799068] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.876143] env[62619]: DEBUG oslo_concurrency.lockutils [req-df6a5220-bb55-4b4d-9fa0-00a3454ab365 req-15481d3b-652f-416e-b4a5-b05b6d6c2a0e service nova] Releasing lock "refresh_cache-4858096a-9683-4a7c-bbeb-4e6b2f5401cf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1763.896887] env[62619]: DEBUG oslo_vmware.api [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778249, 'name': PowerOnVM_Task, 'duration_secs': 0.487286} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.897548] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1763.897814] env[62619]: DEBUG nova.compute.manager [None req-f38a7e6f-bbf4-44f8-b82e-2d34c3deda05 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1763.898572] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd85783c-6d8c-41d5-9383-22cfc3252c77 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.903985] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778250, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.119144] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3d84bef-c8b1-4873-9e4f-b9f6843c8d73 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 31.934s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.120022] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.492s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.120220] env[62619]: INFO nova.compute.manager [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Unshelving [ 1764.215291] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1764.215291] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1764.215291] env[62619]: DEBUG nova.network.neutron [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 
tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1764.253691] env[62619]: DEBUG nova.network.neutron [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating instance_info_cache with network_info: [{"id": "9567749e-9f9f-4b05-a445-3099dd2cdff8", "address": "fa:16:3e:7e:df:71", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9567749e-9f", "ovs_interfaceid": "9567749e-9f9f-4b05-a445-3099dd2cdff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.265470] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52726643-10dd-3164-d2e9-5045f4894820, 'name': SearchDatastore_Task, 'duration_secs': 0.011697} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.268934] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-985e248b-4286-47fb-a166-c3011ac37b0f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.274678] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1764.274678] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e1d841-6f0c-0a23-ed12-64e8dc9b000f" [ 1764.274678] env[62619]: _type = "Task" [ 1764.274678] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.283752] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e1d841-6f0c-0a23-ed12-64e8dc9b000f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.302251] env[62619]: DEBUG nova.compute.utils [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1764.363785] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3a6dca-093c-4151-ad24-24900d6e17e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.372191] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d6605e-d445-4f56-b42f-e3b552207fc7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.407126] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0ba993-07e1-4c0b-be82-59621dbada4a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.419680] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fcaf56a-6bc8-4322-8093-c6e512d03472 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.423420] env[62619]: DEBUG oslo_vmware.api [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778250, 'name': PowerOnVM_Task, 'duration_secs': 0.525287} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.424719] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1764.424858] env[62619]: INFO nova.compute.manager [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Took 8.81 seconds to spawn the instance on the hypervisor. 
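
The PowerOnVM_Task / SearchDatastore_Task entries above all follow the same oslo.vmware pattern: invoke a vSphere *_Task method, then block in the session's task wait, which is what produces the repeated "Waiting for the task", "progress is N%" and "completed successfully" lines. The sketch below is illustrative only and is not Nova's code: power_on_and_wait is a hypothetical helper, and session (an oslo_vmware.api.VMwareAPISession) and vm_ref (a VirtualMachine managed object reference) are assumed to have been obtained elsewhere.

    def power_on_and_wait(session, vm_ref):
        """Issue PowerOnVM_Task and block until vCenter reports it finished."""
        # invoke_api() performs the SOAP call with the session's retry policy
        # and returns a Task managed object reference.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls that task -- the source of the "Waiting for
        # the task" and "progress is N%" debug entries -- and returns its
        # TaskInfo on success, raising if the task ends in an error state.
        return session.wait_for_task(task)

The "completed successfully" entries above are logged by that polling loop once the wait returns.
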
[ 1764.425054] env[62619]: DEBUG nova.compute.manager [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1764.426323] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57811b84-7b71-4786-8612-467b00db6c69 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.436337] env[62619]: DEBUG nova.compute.provider_tree [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1764.749216] env[62619]: WARNING nova.network.neutron [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] ed952a81-cb24-4b52-a137-9ceeefb896cf already exists in list: networks containing: ['ed952a81-cb24-4b52-a137-9ceeefb896cf']. ignoring it [ 1764.760053] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "refresh_cache-7cb51b51-514d-4223-a82a-5cdbdab9482a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.760053] env[62619]: DEBUG nova.objects.instance [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lazy-loading 'migration_context' on Instance uuid 7cb51b51-514d-4223-a82a-5cdbdab9482a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1764.787024] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e1d841-6f0c-0a23-ed12-64e8dc9b000f, 'name': SearchDatastore_Task, 'duration_secs': 0.037871} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.787331] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.787596] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e19650da-cc3d-4350-be3e-dc776ce68206/e19650da-cc3d-4350-be3e-dc776ce68206.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1764.787855] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7a1093b-124d-41f0-bc71-fb2473b81bdc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.795730] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1764.795730] env[62619]: value = "task-1778251" [ 1764.795730] env[62619]: _type = "Task" [ 1764.795730] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.803812] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778251, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.804441] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.955426] env[62619]: INFO nova.compute.manager [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Took 21.31 seconds to build instance. 
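
The lock entries around the image cache above (Acquiring/Acquired/Releasing lock "[datastore1] devstack-image-cache_base/…") are oslo.concurrency locks guarding the shared cache entry while it is searched and the VMDK is copied, alongside the "acquired by"/"released by" lines from decorated methods such as the resource tracker's update_usage. A minimal sketch of the two lockutils forms that produce lines like these; copy_cached_image, do_copy, update_usage and the image_id lock name are hypothetical placeholders, not Nova's actual code.

    from oslo_concurrency import lockutils


    def copy_cached_image(image_id, do_copy):
        # Context-manager form: produces "Acquiring lock" / "Acquired lock" /
        # "Releasing lock" debug lines like those seen above while the body runs.
        with lockutils.lock('[datastore1] devstack-image-cache_base/%s' % image_id):
            do_copy()


    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Decorator form: logs 'acquired by "..."' on entry and '"released" by
        # "..."' on exit, including how long the caller waited and held the lock.
        pass

With the default external=False, both forms serialize callers only within a single process, which is consistent with the waited/held timings reported in the release lines above.
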
[ 1764.960063] env[62619]: ERROR nova.scheduler.client.report [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [req-ee9808c8-2f0c-45de-abfc-59a2eb576ab9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ee9808c8-2f0c-45de-abfc-59a2eb576ab9"}]} [ 1764.976046] env[62619]: DEBUG nova.scheduler.client.report [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1764.992477] env[62619]: DEBUG nova.scheduler.client.report [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1764.992742] env[62619]: DEBUG nova.compute.provider_tree [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1765.007092] env[62619]: DEBUG nova.scheduler.client.report [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1765.025899] env[62619]: DEBUG nova.scheduler.client.report [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 
tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1765.043211] env[62619]: DEBUG nova.network.neutron [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Updating instance_info_cache with network_info: [{"id": "db7fd312-3521-4a87-9acc-4d86d518b63c", "address": "fa:16:3e:14:06:4e", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb7fd312-35", "ovs_interfaceid": "db7fd312-3521-4a87-9acc-4d86d518b63c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f1f8853f-2de2-45f7-8853-4591e89cac0b", "address": "fa:16:3e:30:6b:b8", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1f8853f-2d", "ovs_interfaceid": "f1f8853f-2de2-45f7-8853-4591e89cac0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1765.131734] env[62619]: DEBUG nova.compute.utils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1765.241653] 
env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b389ade8-6b07-4a5e-983e-507b975dff4e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.253332] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f01f4e-6fd2-452c-bf67-8bad2f32c9cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.262828] env[62619]: DEBUG nova.objects.base [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Object Instance<7cb51b51-514d-4223-a82a-5cdbdab9482a> lazy-loaded attributes: info_cache,migration_context {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1765.263923] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c7be5c-5ba9-473d-a30e-584e661cc3eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.294832] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf6a8b8-4795-4c4d-b37d-8e9f4e2e9736 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.316462] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c2719a7-8fa5-4595-93b5-a925227a1e4a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.323154] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c78f54f-874d-4004-8568-dc1495419271 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.331408] env[62619]: DEBUG oslo_vmware.api [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1765.331408] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522c07c4-c6e6-facc-7269-e782ecaa08a4" [ 1765.331408] env[62619]: _type = "Task" [ 1765.331408] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.331658] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778251, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.346010] env[62619]: DEBUG nova.compute.provider_tree [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1765.354120] env[62619]: DEBUG oslo_vmware.api [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522c07c4-c6e6-facc-7269-e782ecaa08a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.457903] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a178cd1e-3ce1-42f2-8594-4cd583c77c89 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "88f79718-97d0-432c-8515-b60ab3dfd7e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.820s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.532935] env[62619]: DEBUG oslo_concurrency.lockutils [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.547716] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1765.548469] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.548647] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.549614] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24882815-c6f2-46f7-9d7e-740e188a4a3c {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.568750] env[62619]: DEBUG nova.virt.hardware [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1765.569059] env[62619]: DEBUG nova.virt.hardware [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1765.569255] env[62619]: DEBUG nova.virt.hardware [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1765.569476] env[62619]: DEBUG nova.virt.hardware [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1765.569658] env[62619]: DEBUG nova.virt.hardware [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1765.569862] env[62619]: DEBUG nova.virt.hardware [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1765.570120] env[62619]: DEBUG nova.virt.hardware [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1765.570489] env[62619]: DEBUG nova.virt.hardware [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1765.570726] env[62619]: DEBUG nova.virt.hardware [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Got 1 possible topologies {{(pid=62619) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1765.570935] env[62619]: DEBUG nova.virt.hardware [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1765.571158] env[62619]: DEBUG nova.virt.hardware [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1765.577533] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Reconfiguring VM to attach interface {{(pid=62619) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1765.577916] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edb45c00-baa9-4c0a-81a1-5827266b5d1f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.596396] env[62619]: DEBUG oslo_vmware.api [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1765.596396] env[62619]: value = "task-1778252" [ 1765.596396] env[62619]: _type = "Task" [ 1765.596396] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.604442] env[62619]: DEBUG oslo_vmware.api [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778252, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.636999] env[62619]: INFO nova.virt.block_device [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Booting with volume ac33ba75-c333-4e12-8448-75caf34bd9c5 at /dev/sdb [ 1765.670351] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6aa443d8-6d39-41be-af75-86dc1b7bada7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.681192] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa43e053-7b72-46e8-9ece-57241323d54d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.716983] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b65c2e05-c5da-4fb2-b7ef-7c84d8e22199 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.724854] env[62619]: DEBUG nova.compute.manager [req-8fda914d-7fe4-4c07-abb3-a2728fc68f72 req-d4eccf91-a57e-4ac3-a3e1-f5dbce867755 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Received event network-changed-f1f8853f-2de2-45f7-8853-4591e89cac0b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1765.724854] env[62619]: DEBUG nova.compute.manager [req-8fda914d-7fe4-4c07-abb3-a2728fc68f72 req-d4eccf91-a57e-4ac3-a3e1-f5dbce867755 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Refreshing instance network info cache due to event network-changed-f1f8853f-2de2-45f7-8853-4591e89cac0b. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1765.725058] env[62619]: DEBUG oslo_concurrency.lockutils [req-8fda914d-7fe4-4c07-abb3-a2728fc68f72 req-d4eccf91-a57e-4ac3-a3e1-f5dbce867755 service nova] Acquiring lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.725238] env[62619]: DEBUG oslo_concurrency.lockutils [req-8fda914d-7fe4-4c07-abb3-a2728fc68f72 req-d4eccf91-a57e-4ac3-a3e1-f5dbce867755 service nova] Acquired lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.725434] env[62619]: DEBUG nova.network.neutron [req-8fda914d-7fe4-4c07-abb3-a2728fc68f72 req-d4eccf91-a57e-4ac3-a3e1-f5dbce867755 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Refreshing network info cache for port f1f8853f-2de2-45f7-8853-4591e89cac0b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1765.732740] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090b45b4-4972-43a4-9a56-831695c54b59 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.778230] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e838f18e-12de-432d-9f0b-2e54c0a13b2d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.785938] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06c96a3-f8de-4de0-ac35-79a440d24587 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.801247] env[62619]: DEBUG nova.virt.block_device [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Updating existing volume attachment record: a9411a69-18d6-48ca-b716-0bec173d78dd {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1765.821607] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778251, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.645308} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.821861] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e19650da-cc3d-4350-be3e-dc776ce68206/e19650da-cc3d-4350-be3e-dc776ce68206.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1765.822086] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1765.822333] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf246f51-ff0e-422a-af3a-0a45ca6a255a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.829043] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1765.829043] env[62619]: value = "task-1778253" [ 1765.829043] env[62619]: _type = "Task" [ 1765.829043] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.844664] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778253, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.844937] env[62619]: DEBUG oslo_vmware.api [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522c07c4-c6e6-facc-7269-e782ecaa08a4, 'name': SearchDatastore_Task, 'duration_secs': 0.031892} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.845229] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.903872] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.904180] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.904419] env[62619]: INFO nova.compute.manager [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Attaching volume 35d065ad-5735-4fa5-b238-762eaceed4e2 to /dev/sdb [ 1765.909238] env[62619]: DEBUG nova.scheduler.client.report [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 134 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1765.909490] env[62619]: DEBUG nova.compute.provider_tree [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 134 to 135 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1765.909806] env[62619]: DEBUG nova.compute.provider_tree [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1765.947424] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb20375-2a54-4dde-8f74-0fabe90d19ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.955592] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7426a67a-fe20-4fcd-9eb2-dbadde7baffc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.975015] env[62619]: DEBUG nova.virt.block_device [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating existing volume attachment record: 91e29493-47dc-433e-be6c-5b505f42f08b {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1766.107758] env[62619]: DEBUG oslo_vmware.api [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778252, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.339839] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778253, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.174382} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.340310] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1766.341371] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1ec404-6ab3-45c9-97df-736cc83f5de9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.371447] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] e19650da-cc3d-4350-be3e-dc776ce68206/e19650da-cc3d-4350-be3e-dc776ce68206.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1766.374062] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b450f19e-2187-470f-9b0b-a2839701828b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.399401] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1766.399401] env[62619]: value = "task-1778257" [ 1766.399401] env[62619]: _type = "Task" [ 1766.399401] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.412649] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778257, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.418779] env[62619]: DEBUG oslo_concurrency.lockutils [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.808s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.420569] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.697s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.420696] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.420901] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1766.421411] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.288s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.423031] env[62619]: INFO nova.compute.claims [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1766.426345] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723ab311-80c6-4232-a8fd-1ed60238fd94 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.441578] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1942d09-84fb-420c-a97d-ee3ab019fa77 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.461862] env[62619]: INFO nova.scheduler.client.report [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Deleted allocations for instance b84dd91a-8e08-4476-9683-655357d18370 [ 1766.463643] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226dbafa-e1f3-427e-928e-9b6e3a51dfcd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.478974] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d356452-ec66-4b46-a95f-c4f68384d1df 
{{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.514213] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179523MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1766.514213] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.555562] env[62619]: DEBUG nova.network.neutron [req-8fda914d-7fe4-4c07-abb3-a2728fc68f72 req-d4eccf91-a57e-4ac3-a3e1-f5dbce867755 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Updated VIF entry in instance network info cache for port f1f8853f-2de2-45f7-8853-4591e89cac0b. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1766.556113] env[62619]: DEBUG nova.network.neutron [req-8fda914d-7fe4-4c07-abb3-a2728fc68f72 req-d4eccf91-a57e-4ac3-a3e1-f5dbce867755 service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Updating instance_info_cache with network_info: [{"id": "db7fd312-3521-4a87-9acc-4d86d518b63c", "address": "fa:16:3e:14:06:4e", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb7fd312-35", "ovs_interfaceid": "db7fd312-3521-4a87-9acc-4d86d518b63c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f1f8853f-2de2-45f7-8853-4591e89cac0b", "address": "fa:16:3e:30:6b:b8", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": 
"nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1f8853f-2d", "ovs_interfaceid": "f1f8853f-2de2-45f7-8853-4591e89cac0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.609275] env[62619]: DEBUG oslo_vmware.api [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778252, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.910790] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778257, 'name': ReconfigVM_Task, 'duration_secs': 0.463208} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.911148] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Reconfigured VM instance instance-0000005d to attach disk [datastore1] e19650da-cc3d-4350-be3e-dc776ce68206/e19650da-cc3d-4350-be3e-dc776ce68206.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1766.911878] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dfb49ccd-2571-49da-841c-ed0b77412c9b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.920710] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1766.920710] env[62619]: value = "task-1778261" [ 1766.920710] env[62619]: _type = "Task" [ 1766.920710] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.934285] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778261, 'name': Rename_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.973934] env[62619]: DEBUG oslo_concurrency.lockutils [None req-734d675e-246a-4c3b-ac42-368177bf7b19 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "b84dd91a-8e08-4476-9683-655357d18370" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.597s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.975048] env[62619]: DEBUG oslo_concurrency.lockutils [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "b84dd91a-8e08-4476-9683-655357d18370" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.331s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.975206] env[62619]: DEBUG oslo_concurrency.lockutils [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "b84dd91a-8e08-4476-9683-655357d18370-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.975408] env[62619]: DEBUG oslo_concurrency.lockutils [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "b84dd91a-8e08-4476-9683-655357d18370-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.975577] env[62619]: DEBUG oslo_concurrency.lockutils [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "b84dd91a-8e08-4476-9683-655357d18370-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.980510] env[62619]: INFO nova.compute.manager [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Terminating instance [ 1767.059779] env[62619]: DEBUG oslo_concurrency.lockutils [req-8fda914d-7fe4-4c07-abb3-a2728fc68f72 req-d4eccf91-a57e-4ac3-a3e1-f5dbce867755 service nova] Releasing lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1767.108626] env[62619]: DEBUG oslo_vmware.api [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778252, 'name': ReconfigVM_Task, 'duration_secs': 1.180881} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.109033] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1767.109247] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Reconfigured VM to attach interface {{(pid=62619) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1767.432329] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778261, 'name': Rename_Task, 'duration_secs': 0.244328} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.432668] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1767.432861] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa3cee05-cf7f-447e-9c55-87eec92693f8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.440378] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1767.440378] env[62619]: value = "task-1778262" [ 1767.440378] env[62619]: _type = "Task" [ 1767.440378] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.449234] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778262, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.483153] env[62619]: DEBUG oslo_concurrency.lockutils [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "refresh_cache-b84dd91a-8e08-4476-9683-655357d18370" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1767.483402] env[62619]: DEBUG oslo_concurrency.lockutils [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquired lock "refresh_cache-b84dd91a-8e08-4476-9683-655357d18370" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1767.483511] env[62619]: DEBUG nova.network.neutron [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1767.614458] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ccb4ec50-6a59-442b-81da-63caec7c3019 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-4983b333-debb-4a2b-b28d-b321f0d8d7d7-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.564s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.676139] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e053ddb-2148-4223-bfac-d1e0fab4e76d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.684431] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80d9657-79e3-4e48-85a0-b81b861ae852 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.716021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "88f79718-97d0-432c-8515-b60ab3dfd7e0" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.716227] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "88f79718-97d0-432c-8515-b60ab3dfd7e0" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.716480] env[62619]: INFO nova.compute.manager [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Shelving [ 1767.718372] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6e5cfae5-7da1-4dd3-96aa-175bf5ad7389 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.727432] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9dc015-fa7b-484b-9a4d-01dc4a0140ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.742221] env[62619]: DEBUG nova.compute.provider_tree [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1767.785598] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Acquiring lock "f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.785742] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Lock "f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.785944] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Acquiring lock "f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.786139] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Lock "f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.786408] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Lock "f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.788474] env[62619]: INFO nova.compute.manager [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Terminating instance [ 1767.951814] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 
tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778262, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.973348] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67cf7cb6-3293-427d-b108-447f8cbbec60 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.981046] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d01d731c-3180-44f1-b5c8-6892d1ac77cb tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Suspending the VM {{(pid=62619) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1767.981301] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-456c9da8-f26e-4e6d-a46b-2a0c4acad429 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.985486] env[62619]: DEBUG nova.compute.utils [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Can not refresh info_cache because instance was not found {{(pid=62619) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1767.989144] env[62619]: DEBUG oslo_vmware.api [None req-d01d731c-3180-44f1-b5c8-6892d1ac77cb tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1767.989144] env[62619]: value = "task-1778263" [ 1767.989144] env[62619]: _type = "Task" [ 1767.989144] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.997614] env[62619]: DEBUG oslo_vmware.api [None req-d01d731c-3180-44f1-b5c8-6892d1ac77cb tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778263, 'name': SuspendVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.004425] env[62619]: DEBUG nova.network.neutron [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1768.082773] env[62619]: DEBUG nova.network.neutron [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.245418] env[62619]: DEBUG nova.scheduler.client.report [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1768.292656] env[62619]: DEBUG nova.compute.manager [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1768.293760] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8139715-757d-49b1-8b8f-261b223778fc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.304802] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62f3f73-cf6c-4edc-be9f-f65051a613d7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.342910] env[62619]: WARNING nova.virt.vmwareapi.driver [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e could not be found. 
[ 1768.343152] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1768.343563] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01095aa4-ca35-4e2e-87f8-d51f78dccbdf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.353364] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259ee416-352d-4ec2-81de-fcb72b0b8606 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.387060] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e could not be found. [ 1768.387438] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1768.387701] env[62619]: INFO nova.compute.manager [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Took 0.09 seconds to destroy the instance on the hypervisor. [ 1768.387786] env[62619]: DEBUG oslo.service.loopingcall [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1768.388009] env[62619]: DEBUG nova.compute.manager [-] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1768.388113] env[62619]: DEBUG nova.network.neutron [-] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1768.452959] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778262, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.499422] env[62619]: DEBUG oslo_vmware.api [None req-d01d731c-3180-44f1-b5c8-6892d1ac77cb tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778263, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.588050] env[62619]: DEBUG oslo_concurrency.lockutils [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Releasing lock "refresh_cache-b84dd91a-8e08-4476-9683-655357d18370" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1768.588050] env[62619]: DEBUG nova.compute.manager [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1768.588050] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1768.588050] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-72a80c11-683f-4481-983b-65898452354b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.596800] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a956359c-6eb5-4586-b19d-b0f1d12ecd8f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.638726] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b84dd91a-8e08-4476-9683-655357d18370 could not be found. [ 1768.638955] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1768.639156] env[62619]: INFO nova.compute.manager [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1768.639407] env[62619]: DEBUG oslo.service.loopingcall [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1768.639702] env[62619]: DEBUG nova.compute.manager [-] [instance: b84dd91a-8e08-4476-9683-655357d18370] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1768.639797] env[62619]: DEBUG nova.network.neutron [-] [instance: b84dd91a-8e08-4476-9683-655357d18370] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1768.669836] env[62619]: DEBUG nova.network.neutron [-] [instance: b84dd91a-8e08-4476-9683-655357d18370] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1768.728606] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1768.729307] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a047f888-156e-41b4-bcd4-ed72f75c20b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.740837] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1768.740837] env[62619]: value = "task-1778265" [ 1768.740837] env[62619]: _type = "Task" [ 1768.740837] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.750045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.329s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.750562] env[62619]: DEBUG nova.compute.manager [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1768.757027] env[62619]: DEBUG oslo_concurrency.lockutils [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.220s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.761538] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778265, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.925284] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "4858096a-9683-4a7c-bbeb-4e6b2f5401cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.925284] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "4858096a-9683-4a7c-bbeb-4e6b2f5401cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.925284] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "4858096a-9683-4a7c-bbeb-4e6b2f5401cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.925284] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "4858096a-9683-4a7c-bbeb-4e6b2f5401cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.929247] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "4858096a-9683-4a7c-bbeb-4e6b2f5401cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.929247] env[62619]: INFO nova.compute.manager [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Terminating instance [ 1768.958649] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778262, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.004891] env[62619]: DEBUG oslo_vmware.api [None req-d01d731c-3180-44f1-b5c8-6892d1ac77cb tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778263, 'name': SuspendVM_Task} progress is 58%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.042076] env[62619]: DEBUG nova.compute.manager [req-f6821a3d-ab5d-47be-8d14-0a9daa770c08 req-3bffd2cb-603a-489a-b026-1ea3c129742f service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Received event network-vif-deleted-ac59f3e2-0841-445b-8907-932fdd3f4d2c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1769.042297] env[62619]: INFO nova.compute.manager [req-f6821a3d-ab5d-47be-8d14-0a9daa770c08 req-3bffd2cb-603a-489a-b026-1ea3c129742f service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Neutron deleted interface ac59f3e2-0841-445b-8907-932fdd3f4d2c; detaching it from the instance and deleting it from the info cache [ 1769.042468] env[62619]: DEBUG nova.network.neutron [req-f6821a3d-ab5d-47be-8d14-0a9daa770c08 req-3bffd2cb-603a-489a-b026-1ea3c129742f service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1769.060437] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6f8025-3b24-4c6a-9e83-acc1fc347ef8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.071757] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e8b57f-4496-406d-b2af-843181703edc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.118423] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c099e4f7-3e66-46d0-a6c4-8ec91df084e8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.126249] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b1c909-9635-4124-9381-a5db565e7e74 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.143818] env[62619]: DEBUG nova.compute.provider_tree [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1769.172284] env[62619]: DEBUG nova.network.neutron [-] [instance: b84dd91a-8e08-4476-9683-655357d18370] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1769.253391] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778265, 'name': PowerOffVM_Task, 'duration_secs': 0.424214} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.254441] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1769.255632] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d18f8b-1e4c-4f34-ba58-83fe37235bcd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.261277] env[62619]: DEBUG nova.compute.utils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1769.276928] env[62619]: DEBUG nova.compute.manager [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1769.277173] env[62619]: DEBUG nova.network.neutron [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1769.280167] env[62619]: DEBUG nova.compute.manager [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1769.283498] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e5d038-3703-43f2-8c67-7217a799f771 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.355222] env[62619]: DEBUG nova.policy [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e9094d6b3854c1184307d9bc35a966e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e11e1bca0c747fd8b4a0ca3e220ba4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1769.391944] env[62619]: DEBUG nova.network.neutron [-] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1769.436035] env[62619]: DEBUG nova.compute.manager [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1769.436303] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1769.437272] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c974772-b0a2-4992-9c4e-a7522e845b6c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.450731] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1769.451482] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-badaa343-9b8e-42c6-89a0-43a17b4e70ad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.457614] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778262, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.458782] env[62619]: DEBUG oslo_vmware.api [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1769.458782] env[62619]: value = "task-1778266" [ 1769.458782] env[62619]: _type = "Task" [ 1769.458782] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.469583] env[62619]: DEBUG oslo_vmware.api [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778266, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.503588] env[62619]: DEBUG oslo_vmware.api [None req-d01d731c-3180-44f1-b5c8-6892d1ac77cb tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778263, 'name': SuspendVM_Task, 'duration_secs': 1.330166} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.503859] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d01d731c-3180-44f1-b5c8-6892d1ac77cb tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Suspended the VM {{(pid=62619) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1769.504048] env[62619]: DEBUG nova.compute.manager [None req-d01d731c-3180-44f1-b5c8-6892d1ac77cb tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1769.504865] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76fced8-1059-4ca8-b854-57a6b46cf33b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.545780] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-21e14964-97d4-4987-9e67-42a13ef1fd04 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.557337] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4568b01-464c-4aad-b1eb-f1ebc7917ada {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.596869] env[62619]: DEBUG nova.compute.manager [req-f6821a3d-ab5d-47be-8d14-0a9daa770c08 req-3bffd2cb-603a-489a-b026-1ea3c129742f service nova] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Detach interface failed, port_id=ac59f3e2-0841-445b-8907-932fdd3f4d2c, reason: Instance f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1769.650020] env[62619]: DEBUG nova.scheduler.client.report [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1769.675189] env[62619]: INFO nova.compute.manager [-] [instance: b84dd91a-8e08-4476-9683-655357d18370] Took 1.04 seconds to deallocate network for instance. [ 1769.739625] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "interface-4983b333-debb-4a2b-b28d-b321f0d8d7d7-f1f8853f-2de2-45f7-8853-4591e89cac0b" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.739909] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-4983b333-debb-4a2b-b28d-b321f0d8d7d7-f1f8853f-2de2-45f7-8853-4591e89cac0b" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.795557] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1769.797656] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-145cd781-dd5d-4ea0-88f2-73e44596aa92 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.805823] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1769.805823] env[62619]: value = "task-1778267" [ 1769.805823] env[62619]: _type = "Task" [ 1769.805823] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.816631] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778267, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.832420] env[62619]: DEBUG nova.network.neutron [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Successfully created port: e81bada3-ed22-4c30-a4dd-4c371831520b {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1769.894352] env[62619]: INFO nova.compute.manager [-] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Took 1.51 seconds to deallocate network for instance. [ 1769.953940] env[62619]: DEBUG oslo_vmware.api [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778262, 'name': PowerOnVM_Task, 'duration_secs': 2.300071} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.954232] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1769.954458] env[62619]: INFO nova.compute.manager [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Took 10.75 seconds to spawn the instance on the hypervisor. [ 1769.954631] env[62619]: DEBUG nova.compute.manager [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1769.955465] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57735699-79af-4ce8-b51e-c1c7e7af2b6c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.972642] env[62619]: DEBUG oslo_vmware.api [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778266, 'name': PowerOffVM_Task, 'duration_secs': 0.419145} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.972881] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1769.973070] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1769.973319] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6594fcb7-00e0-4987-80de-9b3b2838da88 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.153298] env[62619]: DEBUG oslo_concurrency.lockutils [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.400s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.153473] env[62619]: INFO nova.compute.manager [None req-364e48b0-20e0-4adf-a146-367a4e205f0d tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Successfully reverted task state from rebuilding on failure for instance. 
[ 1770.158813] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 4.314s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.181194] env[62619]: INFO nova.compute.manager [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: b84dd91a-8e08-4476-9683-655357d18370] Instance disappeared during terminate [ 1770.181399] env[62619]: DEBUG oslo_concurrency.lockutils [None req-905e4318-ddb7-4f36-a436-e83f43faa0ba tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "b84dd91a-8e08-4476-9683-655357d18370" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.207s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.243058] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1770.243058] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1770.243467] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457455a6-f347-45f2-a998-eee555169399 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.262240] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20cd450-42da-4618-86e6-4eddfb1d50fc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.290046] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Reconfiguring VM to detach interface {{(pid=62619) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1770.290395] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-363e6af1-ea0b-4e72-9ca0-3459c77e29c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.304736] env[62619]: DEBUG nova.compute.manager [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1770.313676] env[62619]: DEBUG oslo_vmware.api [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1770.313676] env[62619]: value = "task-1778269" [ 1770.313676] env[62619]: _type = "Task" [ 1770.313676] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.318577] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778267, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.329460] env[62619]: DEBUG oslo_vmware.api [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778269, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.341940] env[62619]: DEBUG nova.virt.hardware [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1770.342246] env[62619]: DEBUG nova.virt.hardware [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1770.342403] env[62619]: DEBUG nova.virt.hardware [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1770.342638] env[62619]: DEBUG nova.virt.hardware [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1770.342914] env[62619]: DEBUG nova.virt.hardware [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1770.343053] env[62619]: DEBUG nova.virt.hardware 
[None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1770.343339] env[62619]: DEBUG nova.virt.hardware [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1770.343569] env[62619]: DEBUG nova.virt.hardware [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1770.344068] env[62619]: DEBUG nova.virt.hardware [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1770.344280] env[62619]: DEBUG nova.virt.hardware [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1770.344527] env[62619]: DEBUG nova.virt.hardware [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1770.345578] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63cf56c-19ad-4d19-9a4f-b5d33008d0d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.356120] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5266ae36-edae-46e6-a0a2-9788598621b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.361160] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1770.361380] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1770.361565] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Deleting the datastore file 
[datastore1] 4858096a-9683-4a7c-bbeb-4e6b2f5401cf {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1770.362256] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-24472902-8ef3-4654-8151-6ecd262b0a6b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.374979] env[62619]: DEBUG oslo_vmware.api [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for the task: (returnval){ [ 1770.374979] env[62619]: value = "task-1778270" [ 1770.374979] env[62619]: _type = "Task" [ 1770.374979] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.383863] env[62619]: DEBUG oslo_vmware.api [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778270, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.480178] env[62619]: INFO nova.compute.manager [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Took 24.31 seconds to build instance. [ 1770.825729] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778267, 'name': CreateSnapshot_Task, 'duration_secs': 0.641556} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.826028] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1770.827430] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7018aeb2-8f63-47f8-8f9a-93a33f7468df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.833055] env[62619]: DEBUG oslo_vmware.api [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778269, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.886848] env[62619]: DEBUG oslo_vmware.api [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Task: {'id': task-1778270, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200707} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.886848] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1770.887013] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1770.887357] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1770.888104] env[62619]: INFO nova.compute.manager [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Took 1.45 seconds to destroy the instance on the hypervisor. [ 1770.889165] env[62619]: DEBUG oslo.service.loopingcall [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1770.891211] env[62619]: DEBUG nova.compute.manager [-] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1770.891331] env[62619]: DEBUG nova.network.neutron [-] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1770.984842] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c136b0f3-9efa-4bdf-9c76-434eea0fccc3 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Lock "e19650da-cc3d-4350-be3e-dc776ce68206" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.822s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.985770] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e505b2-514d-4a53-b579-9111e7d56378 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.995393] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb35fdaf-2112-42eb-9863-eb2fe5ec2a75 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.030602] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c23926-480e-422c-9b94-1571fa5714f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.041389] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f6357f-e7ae-4e73-b763-9dd4510a4c77 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.066832] env[62619]: DEBUG nova.compute.provider_tree [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1771.232885] env[62619]: INFO nova.compute.manager [None req-13e3a4a0-1989-4934-9b62-2a6c4344e7d4 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Resuming [ 1771.233230] env[62619]: DEBUG nova.objects.instance [None req-13e3a4a0-1989-4934-9b62-2a6c4344e7d4 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lazy-loading 'flavor' on Instance uuid 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1771.329113] env[62619]: DEBUG oslo_vmware.api [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778269, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.355897] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1771.358580] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4b8795a1-acdd-45b7-acc7-c45a1c421b83 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.369384] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1771.369384] env[62619]: value = "task-1778272" [ 1771.369384] env[62619]: _type = "Task" [ 1771.369384] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.380961] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778272, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.411481] env[62619]: DEBUG nova.compute.manager [req-8ef00efc-879b-4656-a1a4-2eeae67b28f0 req-4f3cef6e-2626-4841-bd0d-0c209537e7c0 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Received event network-vif-deleted-47138162-63de-413e-8a1d-767355e11190 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1771.411681] env[62619]: INFO nova.compute.manager [req-8ef00efc-879b-4656-a1a4-2eeae67b28f0 req-4f3cef6e-2626-4841-bd0d-0c209537e7c0 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Neutron deleted interface 47138162-63de-413e-8a1d-767355e11190; detaching it from the instance and deleting it from the info cache [ 1771.411957] env[62619]: DEBUG nova.network.neutron [req-8ef00efc-879b-4656-a1a4-2eeae67b28f0 req-4f3cef6e-2626-4841-bd0d-0c209537e7c0 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1771.561386] env[62619]: DEBUG nova.compute.manager [req-15040965-bfa1-4d35-a2ce-7364c7b0af51 req-eddae291-1fa8-477f-a2e7-3d95fd5f79cd service nova] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Received event network-vif-plugged-e81bada3-ed22-4c30-a4dd-4c371831520b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1771.561653] env[62619]: DEBUG oslo_concurrency.lockutils [req-15040965-bfa1-4d35-a2ce-7364c7b0af51 req-eddae291-1fa8-477f-a2e7-3d95fd5f79cd service nova] Acquiring lock "848da7a1-9cec-4715-bbe1-ef7a51b3a5c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.561856] env[62619]: DEBUG oslo_concurrency.lockutils [req-15040965-bfa1-4d35-a2ce-7364c7b0af51 req-eddae291-1fa8-477f-a2e7-3d95fd5f79cd service nova] Lock 
"848da7a1-9cec-4715-bbe1-ef7a51b3a5c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.562035] env[62619]: DEBUG oslo_concurrency.lockutils [req-15040965-bfa1-4d35-a2ce-7364c7b0af51 req-eddae291-1fa8-477f-a2e7-3d95fd5f79cd service nova] Lock "848da7a1-9cec-4715-bbe1-ef7a51b3a5c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.562220] env[62619]: DEBUG nova.compute.manager [req-15040965-bfa1-4d35-a2ce-7364c7b0af51 req-eddae291-1fa8-477f-a2e7-3d95fd5f79cd service nova] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] No waiting events found dispatching network-vif-plugged-e81bada3-ed22-4c30-a4dd-4c371831520b {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1771.562381] env[62619]: WARNING nova.compute.manager [req-15040965-bfa1-4d35-a2ce-7364c7b0af51 req-eddae291-1fa8-477f-a2e7-3d95fd5f79cd service nova] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Received unexpected event network-vif-plugged-e81bada3-ed22-4c30-a4dd-4c371831520b for instance with vm_state building and task_state spawning. [ 1771.573079] env[62619]: DEBUG nova.scheduler.client.report [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1771.603327] env[62619]: INFO nova.compute.manager [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Took 1.71 seconds to detach 1 volumes for instance. [ 1771.604404] env[62619]: DEBUG nova.compute.manager [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Deleting volume: cd357a5b-17b3-4318-9f18-e8e95fb8d9d9 {{(pid=62619) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1771.830639] env[62619]: DEBUG oslo_vmware.api [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778269, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.865500] env[62619]: DEBUG nova.network.neutron [-] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1771.885888] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778272, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.916344] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73bab014-35e6-4245-a2e7-1c8f426746df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.929927] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef9307c-d670-40f9-a920-2eb17c4ccec7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.978171] env[62619]: DEBUG nova.compute.manager [req-8ef00efc-879b-4656-a1a4-2eeae67b28f0 req-4f3cef6e-2626-4841-bd0d-0c209537e7c0 service nova] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Detach interface failed, port_id=47138162-63de-413e-8a1d-767355e11190, reason: Instance 4858096a-9683-4a7c-bbeb-4e6b2f5401cf could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1772.143807] env[62619]: DEBUG nova.network.neutron [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Successfully updated port: e81bada3-ed22-4c30-a4dd-4c371831520b {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1772.188927] env[62619]: DEBUG nova.compute.manager [req-40260319-688b-49db-bbde-0f39566934ac req-d7f3b799-acc3-43a1-8d78-1f66c5f0bdc8 service nova] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Received event network-changed-e81bada3-ed22-4c30-a4dd-4c371831520b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1772.189147] env[62619]: DEBUG nova.compute.manager [req-40260319-688b-49db-bbde-0f39566934ac req-d7f3b799-acc3-43a1-8d78-1f66c5f0bdc8 service nova] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Refreshing instance network info cache due to event network-changed-e81bada3-ed22-4c30-a4dd-4c371831520b. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1772.189365] env[62619]: DEBUG oslo_concurrency.lockutils [req-40260319-688b-49db-bbde-0f39566934ac req-d7f3b799-acc3-43a1-8d78-1f66c5f0bdc8 service nova] Acquiring lock "refresh_cache-848da7a1-9cec-4715-bbe1-ef7a51b3a5c4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1772.189505] env[62619]: DEBUG oslo_concurrency.lockutils [req-40260319-688b-49db-bbde-0f39566934ac req-d7f3b799-acc3-43a1-8d78-1f66c5f0bdc8 service nova] Acquired lock "refresh_cache-848da7a1-9cec-4715-bbe1-ef7a51b3a5c4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.189664] env[62619]: DEBUG nova.network.neutron [req-40260319-688b-49db-bbde-0f39566934ac req-d7f3b799-acc3-43a1-8d78-1f66c5f0bdc8 service nova] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Refreshing network info cache for port e81bada3-ed22-4c30-a4dd-4c371831520b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1772.191759] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.199687] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.330530] env[62619]: DEBUG oslo_vmware.api [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778269, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.372271] env[62619]: INFO nova.compute.manager [-] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Took 1.48 seconds to deallocate network for instance. [ 1772.388090] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778272, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.590763] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.432s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.595150] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 6.081s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.646119] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "refresh_cache-848da7a1-9cec-4715-bbe1-ef7a51b3a5c4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1772.738796] env[62619]: DEBUG nova.network.neutron [req-40260319-688b-49db-bbde-0f39566934ac req-d7f3b799-acc3-43a1-8d78-1f66c5f0bdc8 service nova] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1772.746751] env[62619]: DEBUG oslo_concurrency.lockutils [None req-13e3a4a0-1989-4934-9b62-2a6c4344e7d4 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1772.746946] env[62619]: DEBUG oslo_concurrency.lockutils [None req-13e3a4a0-1989-4934-9b62-2a6c4344e7d4 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquired lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.747151] env[62619]: DEBUG nova.network.neutron [None req-13e3a4a0-1989-4934-9b62-2a6c4344e7d4 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1772.826778] env[62619]: DEBUG nova.network.neutron [req-40260319-688b-49db-bbde-0f39566934ac req-d7f3b799-acc3-43a1-8d78-1f66c5f0bdc8 service nova] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1772.834671] env[62619]: DEBUG oslo_vmware.api [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778269, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.883582] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.883655] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778272, 'name': CloneVM_Task, 'duration_secs': 1.463728} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.883855] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Created linked-clone VM from snapshot [ 1772.884602] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ddb24b-7fda-468f-8962-84d3231cf27e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.893180] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Uploading image 4589a8a5-619c-4fa5-ba93-0bf4eb7ad36a {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1772.915488] env[62619]: DEBUG oslo_vmware.rw_handles [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1772.915488] env[62619]: value = "vm-369137" [ 1772.915488] env[62619]: _type = "VirtualMachine" [ 1772.915488] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1772.915759] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-39c6fb0e-3bbb-445e-aadc-2601237df790 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.926690] env[62619]: DEBUG oslo_vmware.rw_handles [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lease: (returnval){ [ 1772.926690] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521e7886-8a52-02d2-c672-5295c1bfb17b" [ 1772.926690] env[62619]: _type = "HttpNfcLease" [ 1772.926690] env[62619]: } obtained for exporting VM: (result){ [ 1772.926690] env[62619]: value = "vm-369137" [ 1772.926690] env[62619]: _type = "VirtualMachine" [ 1772.926690] env[62619]: }. 
{{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1772.927153] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the lease: (returnval){ [ 1772.927153] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521e7886-8a52-02d2-c672-5295c1bfb17b" [ 1772.927153] env[62619]: _type = "HttpNfcLease" [ 1772.927153] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1772.934674] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1772.934674] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521e7886-8a52-02d2-c672-5295c1bfb17b" [ 1772.934674] env[62619]: _type = "HttpNfcLease" [ 1772.934674] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1772.998683] env[62619]: DEBUG nova.compute.manager [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1772.999677] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2bd9ff-d419-4cf9-8b47-70b662c5acbc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.027162] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Volume attach. 
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1773.027405] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369135', 'volume_id': '35d065ad-5735-4fa5-b238-762eaceed4e2', 'name': 'volume-35d065ad-5735-4fa5-b238-762eaceed4e2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '917960ca-3870-4e4e-aafe-3c6d77cf7c51', 'attached_at': '', 'detached_at': '', 'volume_id': '35d065ad-5735-4fa5-b238-762eaceed4e2', 'serial': '35d065ad-5735-4fa5-b238-762eaceed4e2'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1773.028328] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af42caaf-b5eb-46b0-8edd-0445e65c63fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.049221] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f11462f-bd9c-4d7d-a3e4-e10b4aef352d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.073104] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] volume-35d065ad-5735-4fa5-b238-762eaceed4e2/volume-35d065ad-5735-4fa5-b238-762eaceed4e2.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1773.073678] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01e2f599-fa0b-44e8-9fe8-63ead33d976e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.093949] env[62619]: DEBUG oslo_vmware.api [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1773.093949] env[62619]: value = "task-1778275" [ 1773.093949] env[62619]: _type = "Task" [ 1773.093949] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.114125] env[62619]: DEBUG oslo_vmware.api [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778275, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.166262] env[62619]: INFO nova.scheduler.client.report [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleted allocation for migration 1b3a5f25-cf05-4d16-a7d4-796835ed89d7 [ 1773.330467] env[62619]: DEBUG oslo_concurrency.lockutils [req-40260319-688b-49db-bbde-0f39566934ac req-d7f3b799-acc3-43a1-8d78-1f66c5f0bdc8 service nova] Releasing lock "refresh_cache-848da7a1-9cec-4715-bbe1-ef7a51b3a5c4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1773.330879] env[62619]: DEBUG oslo_vmware.api [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778269, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.331074] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "refresh_cache-848da7a1-9cec-4715-bbe1-ef7a51b3a5c4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1773.331222] env[62619]: DEBUG nova.network.neutron [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1773.437522] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1773.437522] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521e7886-8a52-02d2-c672-5295c1bfb17b" [ 1773.437522] env[62619]: _type = "HttpNfcLease" [ 1773.437522] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1773.437989] env[62619]: DEBUG oslo_vmware.rw_handles [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1773.437989] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521e7886-8a52-02d2-c672-5295c1bfb17b" [ 1773.437989] env[62619]: _type = "HttpNfcLease" [ 1773.437989] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1773.438601] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd29f360-fa2c-4f48-8d3a-9baf48967587 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.446154] env[62619]: DEBUG oslo_vmware.rw_handles [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fbf436-21c1-4783-7263-95a20026492b/disk-0.vmdk from lease info. 
{{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1773.446352] env[62619]: DEBUG oslo_vmware.rw_handles [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fbf436-21c1-4783-7263-95a20026492b/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1773.511855] env[62619]: DEBUG nova.network.neutron [None req-13e3a4a0-1989-4934-9b62-2a6c4344e7d4 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Updating instance_info_cache with network_info: [{"id": "a10c5399-b021-4ea7-8a41-4d58136aff12", "address": "fa:16:3e:14:4c:3d", "network": {"id": "7e652693-afc0-4c29-9a04-cad63acf109b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-107922130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf557954d79d4bb1939f6e65d4ed00b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa10c5399-b0", "ovs_interfaceid": "a10c5399-b021-4ea7-8a41-4d58136aff12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1773.513724] env[62619]: INFO nova.compute.manager [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] instance snapshotting [ 1773.519822] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408ec208-8fbe-472a-b4ff-52d3e9ca3641 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.543300] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d01081-b14f-4e7a-b4c6-d9369e6579d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.551537] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b6560738-bd08-40e5-99e3-4d099496a443 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.607300] env[62619]: DEBUG oslo_vmware.api [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778275, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.635545] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.635730] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e32cb991-a018-4b55-8cdf-378e212c8434 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.635857] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 11869077-b428-413f-9f8f-7eac08d2d9ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.635981] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4983b333-debb-4a2b-b28d-b321f0d8d7d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.636138] env[62619]: WARNING nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4858096a-9683-4a7c-bbeb-4e6b2f5401cf is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1773.636254] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 917960ca-3870-4e4e-aafe-3c6d77cf7c51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.636366] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 8745aa7f-9848-4320-94b5-08b7e3bccf80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.636486] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e302e431-1f95-4ab5-bfca-59450fd887f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.636880] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 32aed8cd-1583-4253-bfb6-a98610e2f32e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.637058] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.637581] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance f3345332-5a22-4a1c-ac74-4e8f2ceb3f15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.637713] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 7cb51b51-514d-4223-a82a-5cdbdab9482a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.637880] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 88f79718-97d0-432c-8515-b60ab3dfd7e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.638012] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e19650da-cc3d-4350-be3e-dc776ce68206 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.638127] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1773.659029] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "52b29fef-eab6-4541-a570-af9c0c021a75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.659271] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "52b29fef-eab6-4541-a570-af9c0c021a75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.672236] env[62619]: DEBUG oslo_concurrency.lockutils [None req-90078d1e-0aac-43bd-929a-229e2859bb90 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "7cb51b51-514d-4223-a82a-5cdbdab9482a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 11.278s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.835254] env[62619]: DEBUG oslo_vmware.api [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778269, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.878710] env[62619]: DEBUG nova.network.neutron [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1774.018509] env[62619]: DEBUG oslo_concurrency.lockutils [None req-13e3a4a0-1989-4934-9b62-2a6c4344e7d4 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Releasing lock "refresh_cache-4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.019523] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd22127-900f-42a8-9a7e-285b23bf8eca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.027670] env[62619]: DEBUG oslo_concurrency.lockutils [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "7cb51b51-514d-4223-a82a-5cdbdab9482a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.029126] env[62619]: DEBUG oslo_concurrency.lockutils [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "7cb51b51-514d-4223-a82a-5cdbdab9482a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.029423] env[62619]: DEBUG oslo_concurrency.lockutils [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "7cb51b51-514d-4223-a82a-5cdbdab9482a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.029684] env[62619]: DEBUG oslo_concurrency.lockutils [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "7cb51b51-514d-4223-a82a-5cdbdab9482a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.029885] env[62619]: DEBUG oslo_concurrency.lockutils [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "7cb51b51-514d-4223-a82a-5cdbdab9482a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.032627] env[62619]: INFO nova.compute.manager [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Terminating instance [ 1774.040128] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-13e3a4a0-1989-4934-9b62-2a6c4344e7d4 tempest-ServersNegativeTestJSON-1288164526 
tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Resuming the VM {{(pid=62619) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1774.040473] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92a3ef63-8ffe-454d-838f-5d49ffb68ae9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.049694] env[62619]: DEBUG oslo_vmware.api [None req-13e3a4a0-1989-4934-9b62-2a6c4344e7d4 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1774.049694] env[62619]: value = "task-1778276" [ 1774.049694] env[62619]: _type = "Task" [ 1774.049694] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.070370] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1774.071061] env[62619]: DEBUG oslo_vmware.api [None req-13e3a4a0-1989-4934-9b62-2a6c4344e7d4 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778276, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.071170] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b9b9a5f8-4f79-421b-869d-dd41dd77b2cb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.080149] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1774.080149] env[62619]: value = "task-1778277" [ 1774.080149] env[62619]: _type = "Task" [ 1774.080149] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.091684] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778277, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.104070] env[62619]: DEBUG nova.network.neutron [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Updating instance_info_cache with network_info: [{"id": "e81bada3-ed22-4c30-a4dd-4c371831520b", "address": "fa:16:3e:39:fc:f8", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape81bada3-ed", "ovs_interfaceid": "e81bada3-ed22-4c30-a4dd-4c371831520b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.109381] env[62619]: DEBUG oslo_vmware.api [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778275, 'name': ReconfigVM_Task, 'duration_secs': 0.862837} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.109671] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Reconfigured VM instance instance-00000054 to attach disk [datastore1] volume-35d065ad-5735-4fa5-b238-762eaceed4e2/volume-35d065ad-5735-4fa5-b238-762eaceed4e2.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1774.115224] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-789dc712-1ecc-4f96-8c8f-ec32f3869390 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.134994] env[62619]: DEBUG oslo_vmware.api [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1774.134994] env[62619]: value = "task-1778278" [ 1774.134994] env[62619]: _type = "Task" [ 1774.134994] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.149612] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 5cf7ca57-351f-48ab-8758-b30f50cd607f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1774.152301] env[62619]: DEBUG oslo_vmware.api [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778278, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.161739] env[62619]: DEBUG nova.compute.manager [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1774.339264] env[62619]: DEBUG oslo_vmware.api [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778269, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.538064] env[62619]: DEBUG nova.compute.manager [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1774.538326] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1774.539556] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4670421-ff9f-4d8c-8c48-5b768375c394 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.548087] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1774.548220] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1006df3d-e3cd-4e4c-90f0-376746786fe7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.555736] env[62619]: DEBUG oslo_vmware.api [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1774.555736] env[62619]: value = "task-1778279" [ 1774.555736] env[62619]: _type = "Task" [ 1774.555736] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.570601] env[62619]: DEBUG oslo_vmware.api [None req-13e3a4a0-1989-4934-9b62-2a6c4344e7d4 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778276, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.576516] env[62619]: DEBUG oslo_vmware.api [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778279, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.592441] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "0987b6ec-2587-4f15-adbb-f563e19ecce9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.593442] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "0987b6ec-2587-4f15-adbb-f563e19ecce9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.594271] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778277, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.610119] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "refresh_cache-848da7a1-9cec-4715-bbe1-ef7a51b3a5c4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.610604] env[62619]: DEBUG nova.compute.manager [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Instance network_info: |[{"id": "e81bada3-ed22-4c30-a4dd-4c371831520b", "address": "fa:16:3e:39:fc:f8", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape81bada3-ed", "ovs_interfaceid": "e81bada3-ed22-4c30-a4dd-4c371831520b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1774.610982] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Instance 
VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:fc:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '950a2f67-7668-4376-9d48-b38dca033c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e81bada3-ed22-4c30-a4dd-4c371831520b', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1774.620321] env[62619]: DEBUG oslo.service.loopingcall [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1774.621319] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1774.621629] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90cbbef8-2e9f-42e6-a7fd-d4dd85d20df0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.652076] env[62619]: DEBUG oslo_vmware.api [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778278, 'name': ReconfigVM_Task, 'duration_secs': 0.238311} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.653094] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369135', 'volume_id': '35d065ad-5735-4fa5-b238-762eaceed4e2', 'name': 'volume-35d065ad-5735-4fa5-b238-762eaceed4e2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '917960ca-3870-4e4e-aafe-3c6d77cf7c51', 'attached_at': '', 'detached_at': '', 'volume_id': '35d065ad-5735-4fa5-b238-762eaceed4e2', 'serial': '35d065ad-5735-4fa5-b238-762eaceed4e2'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1774.658015] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 52b29fef-eab6-4541-a570-af9c0c021a75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1774.658015] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1774.658015] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3264MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1774.658223] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1774.658223] env[62619]: value = "task-1778280"
[ 1774.658223] env[62619]: _type = "Task"
[ 1774.658223] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1774.672799] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778280, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1774.688640] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1774.834096] env[62619]: DEBUG oslo_vmware.api [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778269, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
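A quick cross-check of the "Final resource view" record above (used_ram=3264MB, used_disk=14GB, used_vcpus=14): assuming the view covers the 14 instances reported as actively managed in this same update_available_resource pass (listed earlier at 1773.635-1773.638: thirteen with 192 MB allocations and one, 7cb51b51-514d-4223-a82a-5cdbdab9482a, with 256 MB) plus the 512 MB memory reservation from the provider inventory, the numbers reproduce exactly. The snippet below is illustrative arithmetic only, not Nova code.

    # Per-instance allocations taken from the 1773.635-1773.638 records above;
    # the 512 MB reservation is 'MEMORY_MB': {'reserved': 512} in the provider
    # inventory (DISK_GB and VCPU have reserved: 0).
    instance_mem_mb = [192] * 13 + [256]
    reserved_mem_mb = 512

    used_ram = reserved_mem_mb + sum(instance_mem_mb)   # 512 + 2752 = 3264 MB
    used_disk = 1 * len(instance_mem_mb)                # 14 x 1 GB root disks = 14 GB
    used_vcpus = 1 * len(instance_mem_mb)               # 14 x 1 VCPU = 14

    assert (used_ram, used_disk, used_vcpus) == (3264, 14, 14)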
[ 1774.965547] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a06472-b6ce-489f-b3dd-ae577ec2dfe5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1774.975424] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f63820a-7002-4bcd-a680-32413f0c87f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1775.010043] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289e9884-a4ad-4ec1-a674-e2ba42297994 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1775.019625] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b138ea-964c-4d4c-8c88-7ba900b8f45c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1775.036325] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1775.065112] env[62619]: DEBUG oslo_vmware.api [None req-13e3a4a0-1989-4934-9b62-2a6c4344e7d4 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778276, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1775.072495] env[62619]: DEBUG oslo_vmware.api [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778279, 'name': PowerOffVM_Task, 'duration_secs': 0.21123} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1775.072854] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1775.073071] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1775.073361] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88d30e7b-d2e2-4333-9b5a-28e69404130c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1775.092502] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778277, 'name': CreateSnapshot_Task} progress is 100%.
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.097193] env[62619]: DEBUG nova.compute.manager [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1775.172229] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778280, 'name': CreateVM_Task, 'duration_secs': 0.44701} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.172458] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1775.173495] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.173495] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.173704] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1775.174371] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e704619-6f4d-4830-aae9-f52fc3f8e77e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.181083] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1775.181083] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5272ce0f-d77e-77bd-87c4-16c61b0490cb" [ 1775.181083] env[62619]: _type = "Task" [ 1775.181083] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.191848] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5272ce0f-d77e-77bd-87c4-16c61b0490cb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.193390] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1775.193619] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1775.193810] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleting the datastore file [datastore1] 7cb51b51-514d-4223-a82a-5cdbdab9482a {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1775.194080] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba722fd2-607f-4f1a-9ebd-65c76e7e352b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.201829] env[62619]: DEBUG oslo_vmware.api [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1775.201829] env[62619]: value = "task-1778282" [ 1775.201829] env[62619]: _type = "Task" [ 1775.201829] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.214489] env[62619]: DEBUG oslo_vmware.api [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778282, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.336097] env[62619]: DEBUG oslo_vmware.api [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778269, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.539822] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1775.562796] env[62619]: DEBUG oslo_vmware.api [None req-13e3a4a0-1989-4934-9b62-2a6c4344e7d4 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778276, 'name': PowerOnVM_Task, 'duration_secs': 1.039472} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.563072] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-13e3a4a0-1989-4934-9b62-2a6c4344e7d4 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Resumed the VM {{(pid=62619) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1775.563437] env[62619]: DEBUG nova.compute.manager [None req-13e3a4a0-1989-4934-9b62-2a6c4344e7d4 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1775.564215] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64805a6-305d-4159-99ac-131a2b86e15b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.592790] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778277, 'name': CreateSnapshot_Task, 'duration_secs': 1.368573} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.593086] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1775.593903] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d98dcc-16a4-4775-9208-6c81a5875018 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.621291] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.691626] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5272ce0f-d77e-77bd-87c4-16c61b0490cb, 'name': SearchDatastore_Task, 'duration_secs': 0.0406} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.692147] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.692432] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1775.692713] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.692967] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.693161] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1775.693429] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a062aa0f-9aea-416f-ad0d-e64787733eb8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.700485] env[62619]: DEBUG nova.objects.instance [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'flavor' on Instance uuid 917960ca-3870-4e4e-aafe-3c6d77cf7c51 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1775.707037] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1775.707220] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1775.708784] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2f8f631-c6a9-4b9a-95b4-d8068ec78e66 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.714593] env[62619]: DEBUG oslo_vmware.api [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778282, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.440764} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.716089] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1775.716378] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1775.716595] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1775.716898] env[62619]: INFO nova.compute.manager [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Took 1.18 seconds to destroy the instance on the hypervisor. 
[ 1775.717195] env[62619]: DEBUG oslo.service.loopingcall [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1775.717791] env[62619]: DEBUG nova.compute.manager [-] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1775.717954] env[62619]: DEBUG nova.network.neutron [-] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1775.721386] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1775.721386] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5238afa5-3bde-2aef-d3df-22766028a1e8" [ 1775.721386] env[62619]: _type = "Task" [ 1775.721386] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.731195] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5238afa5-3bde-2aef-d3df-22766028a1e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.835224] env[62619]: DEBUG oslo_vmware.api [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778269, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.028401] env[62619]: DEBUG nova.compute.manager [req-d498e436-930f-4a19-ba30-e76c87be5e85 req-783a7011-0940-4308-bbfa-b096fe25d5de service nova] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Received event network-vif-deleted-9567749e-9f9f-4b05-a445-3099dd2cdff8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1776.028795] env[62619]: INFO nova.compute.manager [req-d498e436-930f-4a19-ba30-e76c87be5e85 req-783a7011-0940-4308-bbfa-b096fe25d5de service nova] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Neutron deleted interface 9567749e-9f9f-4b05-a445-3099dd2cdff8; detaching it from the instance and deleting it from the info cache [ 1776.029071] env[62619]: DEBUG nova.network.neutron [req-d498e436-930f-4a19-ba30-e76c87be5e85 req-783a7011-0940-4308-bbfa-b096fe25d5de service nova] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.044370] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1776.044523] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.450s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1776.044731] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.853s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1776.045063] env[62619]: DEBUG nova.objects.instance [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lazy-loading 'pci_requests' on Instance uuid 5cf7ca57-351f-48ab-8758-b30f50cd607f {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1776.047132] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1776.047132] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Cleaning up deleted instances {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11609}} [ 1776.118305] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1776.119022] env[62619]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4ff311be-11ec-4ee5-b2f5-51caf88f7e30 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.127989] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1776.127989] env[62619]: value = "task-1778283" [ 1776.127989] env[62619]: _type = "Task" [ 1776.127989] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.137584] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778283, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.205514] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c94f120c-cff6-4a24-9b02-c19b2c80901c tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 10.301s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1776.232828] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5238afa5-3bde-2aef-d3df-22766028a1e8, 'name': SearchDatastore_Task, 'duration_secs': 0.034445} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.233637] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0142381-35b2-4cb4-8993-6e689de2ea85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.240178] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1776.240178] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52800631-5f4d-377c-c5fa-620ce7e37cb9" [ 1776.240178] env[62619]: _type = "Task" [ 1776.240178] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.249439] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52800631-5f4d-377c-c5fa-620ce7e37cb9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.335835] env[62619]: DEBUG oslo_vmware.api [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778269, 'name': ReconfigVM_Task, 'duration_secs': 5.821308} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.336205] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1776.336473] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Reconfigured VM to detach interface {{(pid=62619) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1776.493350] env[62619]: DEBUG nova.network.neutron [-] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.532192] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af6528ad-fcb9-48a7-8448-c11906c1b746 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.547217] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6e425b-5d40-4d12-84e2-d54cedf8edea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.583859] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] There are 65 instances to clean {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11618}} [ 1776.583992] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: b84dd91a-8e08-4476-9683-655357d18370] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1776.587255] env[62619]: DEBUG nova.objects.instance [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lazy-loading 'numa_topology' on Instance uuid 5cf7ca57-351f-48ab-8758-b30f50cd607f {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1776.623823] env[62619]: DEBUG nova.compute.manager [req-d498e436-930f-4a19-ba30-e76c87be5e85 req-783a7011-0940-4308-bbfa-b096fe25d5de service nova] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Detach interface failed, port_id=9567749e-9f9f-4b05-a445-3099dd2cdff8, reason: Instance 7cb51b51-514d-4223-a82a-5cdbdab9482a could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1776.643300] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778283, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.752101] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52800631-5f4d-377c-c5fa-620ce7e37cb9, 'name': SearchDatastore_Task, 'duration_secs': 0.0183} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.752397] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1776.752663] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4/848da7a1-9cec-4715-bbe1-ef7a51b3a5c4.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1776.752992] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df1db2f3-b87a-496a-9adb-92c664e34575 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.760970] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1776.760970] env[62619]: value = "task-1778284" [ 1776.760970] env[62619]: _type = "Task" [ 1776.760970] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.772906] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778284, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.996965] env[62619]: INFO nova.compute.manager [-] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Took 1.28 seconds to deallocate network for instance. 
[ 1777.090028] env[62619]: INFO nova.compute.claims [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1777.093065] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: a3101076-36d6-409a-8072-638107e63073] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1777.142499] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778283, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.271960] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778284, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.504633] env[62619]: DEBUG oslo_concurrency.lockutils [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.574819] env[62619]: DEBUG nova.compute.manager [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Stashing vm_state: active {{(pid=62619) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1777.598704] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 34180944-02f7-4115-8178-64f2f2591080] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1777.642527] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778283, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.671353] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1777.671458] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1777.671640] env[62619]: DEBUG nova.network.neutron [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1777.772952] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778284, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.629424} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.773233] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4/848da7a1-9cec-4715-bbe1-ef7a51b3a5c4.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1777.773462] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1777.773701] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-52a9c867-fe26-443f-94a1-6de02e6b2bb1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.782118] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1777.782118] env[62619]: value = "task-1778285" [ 1777.782118] env[62619]: _type = "Task" [ 1777.782118] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.792711] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778285, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.960932] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.961121] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.961340] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.961629] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.961808] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.965260] env[62619]: INFO nova.compute.manager [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Terminating instance [ 1778.060288] env[62619]: DEBUG nova.compute.manager [req-f9c90c70-7433-4db3-b571-fe5356135cfe req-2c48fde3-5b75-4053-869c-fe90fd1317ee service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Received event network-vif-deleted-f1f8853f-2de2-45f7-8853-4591e89cac0b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1778.060746] env[62619]: INFO nova.compute.manager [req-f9c90c70-7433-4db3-b571-fe5356135cfe req-2c48fde3-5b75-4053-869c-fe90fd1317ee service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Neutron deleted interface f1f8853f-2de2-45f7-8853-4591e89cac0b; detaching it from the instance and deleting it from the info cache [ 1778.061079] env[62619]: DEBUG nova.network.neutron [req-f9c90c70-7433-4db3-b571-fe5356135cfe req-2c48fde3-5b75-4053-869c-fe90fd1317ee service nova] [instance: 
4983b333-debb-4a2b-b28d-b321f0d8d7d7] Updating instance_info_cache with network_info: [{"id": "db7fd312-3521-4a87-9acc-4d86d518b63c", "address": "fa:16:3e:14:06:4e", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb7fd312-35", "ovs_interfaceid": "db7fd312-3521-4a87-9acc-4d86d518b63c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1778.097230] env[62619]: DEBUG oslo_concurrency.lockutils [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.104507] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e34a8173-952b-4ddc-90cf-3681387733fa] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1778.146213] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778283, 'name': CloneVM_Task, 'duration_secs': 1.940516} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.146213] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Created linked-clone VM from snapshot [ 1778.146213] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0998bce9-6ebd-41f1-9c1a-a1a3d05948fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.160496] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Uploading image 1b367f3f-dbe9-40fa-82bb-59204b059dc3 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1778.188625] env[62619]: DEBUG oslo_vmware.rw_handles [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1778.188625] env[62619]: value = "vm-369140" [ 1778.188625] env[62619]: _type = "VirtualMachine" [ 1778.188625] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1778.188997] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-92d7261d-62e3-464a-b6bd-a3d4282dcaee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.201543] env[62619]: DEBUG oslo_vmware.rw_handles [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Lease: (returnval){ [ 1778.201543] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d47e75-3f48-e919-1c1f-8164e7b45496" [ 1778.201543] env[62619]: _type = "HttpNfcLease" [ 1778.201543] env[62619]: } obtained for exporting VM: (result){ [ 1778.201543] env[62619]: value = "vm-369140" [ 1778.201543] env[62619]: _type = "VirtualMachine" [ 1778.201543] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1778.201543] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the lease: (returnval){ [ 1778.201543] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d47e75-3f48-e919-1c1f-8164e7b45496" [ 1778.201543] env[62619]: _type = "HttpNfcLease" [ 1778.201543] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1778.216046] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1778.216046] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d47e75-3f48-e919-1c1f-8164e7b45496" [ 1778.216046] env[62619]: _type = "HttpNfcLease" [ 1778.216046] env[62619]: } is ready. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1778.216046] env[62619]: DEBUG oslo_vmware.rw_handles [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1778.216046] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d47e75-3f48-e919-1c1f-8164e7b45496" [ 1778.216046] env[62619]: _type = "HttpNfcLease" [ 1778.216046] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1778.216367] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2814224b-c5cf-4afa-95cc-4c1e9d90601c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.226647] env[62619]: DEBUG oslo_vmware.rw_handles [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5217e6db-dbb2-045e-b64b-c7ef164e06ea/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1778.226850] env[62619]: DEBUG oslo_vmware.rw_handles [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5217e6db-dbb2-045e-b64b-c7ef164e06ea/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1778.299181] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778285, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.145775} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.299370] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1778.300247] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3362a07b-e19e-4097-b28b-b9bbd47ecb91 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.328718] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4/848da7a1-9cec-4715-bbe1-ef7a51b3a5c4.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1778.332148] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8697fbab-726f-4070-9409-d28691524525 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.353988] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-aca981b7-fe44-497e-a74b-980a127ec166 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.360831] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1778.360831] env[62619]: value = "task-1778287" [ 1778.360831] env[62619]: _type = "Task" [ 1778.360831] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.375871] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778287, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.471349] env[62619]: DEBUG nova.compute.manager [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1778.471630] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1778.472973] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e91e27d-afb2-4bff-b640-0b0db1f49bf8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.482693] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1778.483026] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-97abda9b-bee6-41b8-84db-efd9bff0d2cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.488344] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9dd146-61f0-4fae-84bf-cbc6c14ed775 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.492757] env[62619]: DEBUG oslo_vmware.api [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1778.492757] env[62619]: value = "task-1778288" [ 1778.492757] env[62619]: _type = "Task" [ 1778.492757] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.499531] env[62619]: INFO nova.network.neutron [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Port f1f8853f-2de2-45f7-8853-4591e89cac0b from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1778.499898] env[62619]: DEBUG nova.network.neutron [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Updating instance_info_cache with network_info: [{"id": "db7fd312-3521-4a87-9acc-4d86d518b63c", "address": "fa:16:3e:14:06:4e", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb7fd312-35", "ovs_interfaceid": "db7fd312-3521-4a87-9acc-4d86d518b63c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1778.504659] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397d808d-29d4-48c0-8526-15550f87cf01 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.509716] env[62619]: DEBUG oslo_vmware.api [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778288, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.541319] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c057e0-f6e1-4b08-ba91-df4d5b4a234f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.551248] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfd1676-ac90-4ef1-8907-826525ed718c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.572437] env[62619]: DEBUG oslo_concurrency.lockutils [req-f9c90c70-7433-4db3-b571-fe5356135cfe req-2c48fde3-5b75-4053-869c-fe90fd1317ee service nova] Acquiring lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1778.573070] env[62619]: DEBUG nova.compute.provider_tree [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1778.607995] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: b3d9c418-f521-4770-a381-5238be6cc33c] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1778.872497] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778287, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.004265] env[62619]: DEBUG oslo_vmware.api [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778288, 'name': PowerOffVM_Task, 'duration_secs': 0.232347} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.004929] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1779.004929] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1779.005341] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-060009d5-6a6c-43ec-ac2b-05d049d563df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.010574] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "refresh_cache-4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.076698] env[62619]: DEBUG nova.scheduler.client.report [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1779.097802] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1779.097802] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1779.097802] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Deleting the datastore file [datastore1] 4983b333-debb-4a2b-b28d-b321f0d8d7d7 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1779.097802] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a29a6997-6c73-4189-bd08-8aafac350c33 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.108041] env[62619]: DEBUG oslo_vmware.api [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1779.108041] env[62619]: value = "task-1778290" [ 1779.108041] env[62619]: _type = "Task" [ 1779.108041] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.112381] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 8c296f2c-3e47-4431-b0c0-f7f1706c4a12] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1779.120640] env[62619]: DEBUG oslo_vmware.api [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778290, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.185072] env[62619]: DEBUG nova.objects.instance [None req-47755b7b-e4e5-4322-bb86-8860e7bc0a3a tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Lazy-loading 'flavor' on Instance uuid 32aed8cd-1583-4253-bfb6-a98610e2f32e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1779.373043] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778287, 'name': ReconfigVM_Task, 'duration_secs': 0.691572} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.374426] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4/848da7a1-9cec-4715-bbe1-ef7a51b3a5c4.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1779.374426] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5b55db1-0b7e-4bf7-9c48-1c13919460a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.384164] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1779.384164] env[62619]: value = "task-1778291" [ 1779.384164] env[62619]: _type = "Task" [ 1779.384164] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.395804] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778291, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.517907] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11277f6c-912f-4860-8225-56af38ea6013 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-4983b333-debb-4a2b-b28d-b321f0d8d7d7-f1f8853f-2de2-45f7-8853-4591e89cac0b" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.778s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.582909] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.537s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.584652] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.385s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.584983] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.587075] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.704s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.587369] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.589806] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.901s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.591564] env[62619]: INFO nova.compute.claims [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1779.616138] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: cb33580e-d70d-4557-98fe-e673d93f3307] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1779.622550] env[62619]: DEBUG oslo_vmware.api [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.378835} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.624653] env[62619]: INFO nova.scheduler.client.report [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Deleted allocations for instance 4858096a-9683-4a7c-bbeb-4e6b2f5401cf [ 1779.626373] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1779.626808] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1779.626984] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1779.628318] env[62619]: INFO nova.compute.manager [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1779.628587] env[62619]: DEBUG oslo.service.loopingcall [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1779.632354] env[62619]: DEBUG nova.compute.manager [-] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1779.632354] env[62619]: DEBUG nova.network.neutron [-] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1779.634808] env[62619]: INFO nova.network.neutron [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Updating port 89e91bb9-2bd5-4385-b3dd-cee4612bb166 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1779.661233] env[62619]: DEBUG oslo_concurrency.lockutils [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "11869077-b428-413f-9f8f-7eac08d2d9ec" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.662031] env[62619]: DEBUG oslo_concurrency.lockutils [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "11869077-b428-413f-9f8f-7eac08d2d9ec" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.689220] env[62619]: DEBUG oslo_concurrency.lockutils [None req-47755b7b-e4e5-4322-bb86-8860e7bc0a3a tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquiring lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.689395] env[62619]: DEBUG oslo_concurrency.lockutils [None req-47755b7b-e4e5-4322-bb86-8860e7bc0a3a tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquired lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.899513] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778291, 'name': Rename_Task, 'duration_secs': 0.191586} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.900277] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1779.900277] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b9b59ce-b9aa-460e-aa41-107adb27f386 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.914140] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1779.914140] env[62619]: value = "task-1778292" [ 1779.914140] env[62619]: _type = "Task" [ 1779.914140] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.924355] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778292, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.114090] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31ba1d9a-f2ce-4c77-adf2-01fbe0f549aa tempest-ServerActionsV293TestJSON-762184658 tempest-ServerActionsV293TestJSON-762184658-project-member] Lock "f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.328s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.127018] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 20d62152-3859-4023-a11d-b17c76e1090a] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1780.141871] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae7143d5-db44-4ece-8871-c232b39b2140 tempest-FloatingIPsAssociationTestJSON-72525605 tempest-FloatingIPsAssociationTestJSON-72525605-project-member] Lock "4858096a-9683-4a7c-bbeb-4e6b2f5401cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.217s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.165606] env[62619]: INFO nova.compute.manager [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Detaching volume b0427cd3-da5b-4bb2-96ed-03dc601c6f51 [ 1780.215542] env[62619]: INFO nova.virt.block_device [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Attempting to driver detach volume b0427cd3-da5b-4bb2-96ed-03dc601c6f51 from mountpoint /dev/sdb [ 1780.216036] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 
tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Volume detach. Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1780.216113] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369108', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'name': 'volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '11869077-b428-413f-9f8f-7eac08d2d9ec', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'serial': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1780.217126] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed39718-0703-4368-9730-ed658f1a10a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.242620] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286d9a41-41c1-480b-bc46-b7f1231dfdf7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.251780] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a074f3e1-ec4b-4c79-acd7-ba9ab3b2f3f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.275574] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd48dc19-d537-4646-8778-abaf4275e439 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.293709] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] The volume has not been displaced from its original location: [datastore1] volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51/volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1780.299746] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Reconfiguring VM instance instance-0000004b to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1780.300968] env[62619]: DEBUG nova.network.neutron [None req-47755b7b-e4e5-4322-bb86-8860e7bc0a3a tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1780.302447] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdebb7db-5dd1-4207-8f3e-115fd622c85c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.324685] env[62619]: DEBUG oslo_vmware.api [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1780.324685] env[62619]: value = "task-1778293" [ 1780.324685] env[62619]: _type = "Task" [ 1780.324685] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.337315] env[62619]: DEBUG oslo_vmware.api [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778293, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.428483] env[62619]: DEBUG oslo_vmware.api [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778292, 'name': PowerOnVM_Task, 'duration_secs': 0.517394} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.428651] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1780.429016] env[62619]: INFO nova.compute.manager [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Took 10.12 seconds to spawn the instance on the hypervisor. 
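The ReconfigVM_Task / Rename_Task / PowerOnVM_Task records above are produced by oslo.vmware's session layer: the driver invokes a vSphere task method through the session and then blocks in wait_for_task, whose polling loop emits the recurring "Task: {'id': ..., 'name': ...} progress is N%." lines. A minimal sketch of that call pattern outside of Nova is shown below; the host, credentials and VM lookup are placeholders for illustration and none of the values come from this log.

    from oslo_vmware import api, vim_util

    # Placeholder endpoint and credentials -- illustrative only.
    session = api.VMwareAPISession(
        'vcenter.example.test',         # host
        'administrator@vsphere.local',  # server_username
        'secret',                       # server_password
        api_retry_count=10,
        task_poll_interval=0.5,  # governs how often the "progress is N%" polls fire
    )

    # Fetch one VirtualMachine managed object reference, roughly analogous to the
    # PropertyCollector.RetrievePropertiesEx calls seen throughout this log.
    result = session.invoke_api(vim_util, 'get_objects',
                                session.vim, 'VirtualMachine', 1, ['name'])
    vm_ref = result.objects[0].obj

    # Invoke a vSphere task method, then block while the session polls the task;
    # the polling is what produces the "Task: {...} progress is N%." debug lines
    # and the final "... completed successfully." record.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)
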
[ 1780.429274] env[62619]: DEBUG nova.compute.manager [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1780.430276] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e692389f-18f2-426b-8943-89345253b960 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.631777] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: d0258646-e687-4198-b7c8-7bd116e3bf18] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1780.768115] env[62619]: DEBUG nova.compute.manager [req-3eaad369-95e4-4a33-8178-8adbd8620db5 req-cc8f2740-ea6b-4a0c-a823-5955a78afbd2 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Received event network-changed-2924458a-bf48-482f-ab31-ad34e83a94d4 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1780.768948] env[62619]: DEBUG nova.compute.manager [req-3eaad369-95e4-4a33-8178-8adbd8620db5 req-cc8f2740-ea6b-4a0c-a823-5955a78afbd2 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Refreshing instance network info cache due to event network-changed-2924458a-bf48-482f-ab31-ad34e83a94d4. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1780.769308] env[62619]: DEBUG oslo_concurrency.lockutils [req-3eaad369-95e4-4a33-8178-8adbd8620db5 req-cc8f2740-ea6b-4a0c-a823-5955a78afbd2 service nova] Acquiring lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1780.834993] env[62619]: DEBUG oslo_vmware.api [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778293, 'name': ReconfigVM_Task, 'duration_secs': 0.314281} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.837678] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Reconfigured VM instance instance-0000004b to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1780.845057] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-337df1e8-3d51-4c41-8477-7969df9d657c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.854463] env[62619]: DEBUG nova.compute.manager [req-8e6cdb0c-65fa-4c63-b0c8-7df09bb43dc3 req-575d8097-afe8-45ec-99e6-9c4e4a789ffd service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Received event network-vif-deleted-db7fd312-3521-4a87-9acc-4d86d518b63c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1780.854666] env[62619]: INFO nova.compute.manager [req-8e6cdb0c-65fa-4c63-b0c8-7df09bb43dc3 req-575d8097-afe8-45ec-99e6-9c4e4a789ffd service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Neutron deleted interface db7fd312-3521-4a87-9acc-4d86d518b63c; detaching it from the instance and deleting it from the info cache [ 1780.854837] env[62619]: DEBUG nova.network.neutron [req-8e6cdb0c-65fa-4c63-b0c8-7df09bb43dc3 req-575d8097-afe8-45ec-99e6-9c4e4a789ffd service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1780.873658] env[62619]: DEBUG oslo_vmware.api [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1780.873658] env[62619]: value = "task-1778294" [ 1780.873658] env[62619]: _type = "Task" [ 1780.873658] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.886180] env[62619]: DEBUG oslo_vmware.api [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778294, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.921860] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b7ccf25-0d27-48c8-90ba-c134a15d1bc1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.932405] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58cf522-5754-4d1e-bb8d-18bfe8ad7ac3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.979851] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dacd2d0a-b1f8-4a1b-b905-5b1ee643a16a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.982951] env[62619]: INFO nova.compute.manager [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Took 18.88 seconds to build instance. [ 1780.989889] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7fa513-2067-41e9-9951-29c120ed6335 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.004486] env[62619]: DEBUG nova.compute.provider_tree [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1781.009924] env[62619]: DEBUG nova.network.neutron [-] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.135935] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 1257a23e-3beb-4357-9322-4b84c87d0c35] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1781.197337] env[62619]: DEBUG nova.network.neutron [None req-47755b7b-e4e5-4322-bb86-8860e7bc0a3a tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Updating instance_info_cache with network_info: [{"id": "2924458a-bf48-482f-ab31-ad34e83a94d4", "address": "fa:16:3e:c5:5c:af", "network": {"id": "c853257c-5523-4c7c-ac39-b96dd377e1fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1460025320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767e455ac6ef43d1b587e3e953ed8a9d", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2924458a-bf", "ovs_interfaceid": "2924458a-bf48-482f-ab31-ad34e83a94d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.368704] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-737af981-a0b4-43ee-b5b1-87653268e9ed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.377449] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a4ef52-fab0-45e1-8214-19410b7b8b54 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.402429] env[62619]: DEBUG oslo_vmware.api [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778294, 'name': ReconfigVM_Task, 'duration_secs': 0.178165} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.402769] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369108', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'name': 'volume-b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '11869077-b428-413f-9f8f-7eac08d2d9ec', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51', 'serial': 'b0427cd3-da5b-4bb2-96ed-03dc601c6f51'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1781.420910] env[62619]: DEBUG nova.compute.manager [req-8e6cdb0c-65fa-4c63-b0c8-7df09bb43dc3 req-575d8097-afe8-45ec-99e6-9c4e4a789ffd service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Detach interface failed, port_id=db7fd312-3521-4a87-9acc-4d86d518b63c, reason: Instance 4983b333-debb-4a2b-b28d-b321f0d8d7d7 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1781.485509] env[62619]: DEBUG oslo_concurrency.lockutils [None req-07b4d0e1-5a8a-4b95-bd0c-03ca85f7be5a tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "848da7a1-9cec-4715-bbe1-ef7a51b3a5c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.388s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.494145] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.494356] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.494589] env[62619]: DEBUG nova.network.neutron [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1781.511270] env[62619]: DEBUG nova.scheduler.client.report [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1781.515289] env[62619]: INFO nova.compute.manager [-] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Took 1.88 seconds to deallocate network for instance. 
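The 'Lock "compute_resources" acquired by "..." :: waited' and '"released" by "..." :: held N.NNNs' records in this stretch come from oslo.concurrency's lockutils: the synchronized decorator wraps a callable in a named internal lock and debug-logs how long each caller waited for and held it, while the bare 'Acquiring lock "..."' / 'Releasing lock "..."' pairs correspond to the lock() context manager. A small illustrative sketch follows; the lock names and functions are hypothetical, not Nova code.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Only one caller at a time holds the 'compute_resources' lock; the
        # decorator's wrapper logs the "acquired ... waited" and
        # '"released" ... held' timings seen in this log.
        return 'claimed %s' % instance_uuid

    def release_allocations(instance_uuid):
        # Context-manager form: logs the plain 'Acquiring lock'/'Releasing lock'
        # pairs, here keyed by a per-instance lock name.
        with lockutils.lock(instance_uuid):
            return 'released allocations for %s' % instance_uuid

    if __name__ == '__main__':
        print(claim_resources('0987b6ec-2587-4f15-adbb-f563e19ecce9'))
        print(release_allocations('0987b6ec-2587-4f15-adbb-f563e19ecce9'))
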
[ 1781.642030] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: fdde42eb-766c-4549-aae5-f7b1a1097cc6] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1781.699747] env[62619]: DEBUG oslo_concurrency.lockutils [None req-47755b7b-e4e5-4322-bb86-8860e7bc0a3a tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Releasing lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1781.700190] env[62619]: DEBUG nova.compute.manager [None req-47755b7b-e4e5-4322-bb86-8860e7bc0a3a tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Inject network info {{(pid=62619) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7600}} [ 1781.700658] env[62619]: DEBUG nova.compute.manager [None req-47755b7b-e4e5-4322-bb86-8860e7bc0a3a tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] network_info to inject: |[{"id": "2924458a-bf48-482f-ab31-ad34e83a94d4", "address": "fa:16:3e:c5:5c:af", "network": {"id": "c853257c-5523-4c7c-ac39-b96dd377e1fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1460025320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767e455ac6ef43d1b587e3e953ed8a9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2924458a-bf", "ovs_interfaceid": "2924458a-bf48-482f-ab31-ad34e83a94d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7601}} [ 1781.709087] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-47755b7b-e4e5-4322-bb86-8860e7bc0a3a tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Reconfiguring VM instance to set the machine id {{(pid=62619) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1781.709556] env[62619]: DEBUG oslo_concurrency.lockutils [req-3eaad369-95e4-4a33-8178-8adbd8620db5 req-cc8f2740-ea6b-4a0c-a823-5955a78afbd2 service nova] Acquired lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.709875] env[62619]: DEBUG 
nova.network.neutron [req-3eaad369-95e4-4a33-8178-8adbd8620db5 req-cc8f2740-ea6b-4a0c-a823-5955a78afbd2 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Refreshing network info cache for port 2924458a-bf48-482f-ab31-ad34e83a94d4 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1781.712371] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a0d1c1c-6379-42b0-b3fa-5237b6460e12 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.753333] env[62619]: DEBUG oslo_vmware.api [None req-47755b7b-e4e5-4322-bb86-8860e7bc0a3a tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Waiting for the task: (returnval){ [ 1781.753333] env[62619]: value = "task-1778295" [ 1781.753333] env[62619]: _type = "Task" [ 1781.753333] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.766329] env[62619]: DEBUG oslo_vmware.api [None req-47755b7b-e4e5-4322-bb86-8860e7bc0a3a tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778295, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.967679] env[62619]: DEBUG nova.objects.instance [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lazy-loading 'flavor' on Instance uuid 11869077-b428-413f-9f8f-7eac08d2d9ec {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1782.016291] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.016833] env[62619]: DEBUG nova.compute.manager [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1782.020061] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.399s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.021547] env[62619]: INFO nova.compute.claims [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1782.025704] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.145429] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: cbff225f-2d11-4a43-a320-95dd3afb8e48] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1782.252082] env[62619]: DEBUG oslo_concurrency.lockutils [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "848da7a1-9cec-4715-bbe1-ef7a51b3a5c4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.252082] env[62619]: DEBUG oslo_concurrency.lockutils [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "848da7a1-9cec-4715-bbe1-ef7a51b3a5c4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.252082] env[62619]: DEBUG oslo_concurrency.lockutils [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "848da7a1-9cec-4715-bbe1-ef7a51b3a5c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.252082] env[62619]: DEBUG oslo_concurrency.lockutils [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "848da7a1-9cec-4715-bbe1-ef7a51b3a5c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.252082] env[62619]: DEBUG oslo_concurrency.lockutils [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "848da7a1-9cec-4715-bbe1-ef7a51b3a5c4-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.253231] env[62619]: INFO nova.compute.manager [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Terminating instance [ 1782.269604] env[62619]: DEBUG oslo_vmware.api [None req-47755b7b-e4e5-4322-bb86-8860e7bc0a3a tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778295, 'name': ReconfigVM_Task, 'duration_secs': 0.231928} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.269604] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-47755b7b-e4e5-4322-bb86-8860e7bc0a3a tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Reconfigured VM instance to set the machine id {{(pid=62619) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1782.317019] env[62619]: DEBUG oslo_vmware.rw_handles [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fbf436-21c1-4783-7263-95a20026492b/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1782.317019] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82880930-2164-4970-9644-d31436e131f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.322716] env[62619]: DEBUG oslo_vmware.rw_handles [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fbf436-21c1-4783-7263-95a20026492b/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1782.322881] env[62619]: ERROR oslo_vmware.rw_handles [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fbf436-21c1-4783-7263-95a20026492b/disk-0.vmdk due to incomplete transfer. [ 1782.323131] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f2312ebb-f660-4f8b-89e2-d776b92a3357 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.334647] env[62619]: DEBUG oslo_vmware.rw_handles [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fbf436-21c1-4783-7263-95a20026492b/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1782.335399] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Uploaded image 4589a8a5-619c-4fa5-ba93-0bf4eb7ad36a to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1782.338862] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1782.339529] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6840cc54-42ff-4074-8fa8-a11297f60748 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.347299] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1782.347299] env[62619]: value = "task-1778296" [ 1782.347299] env[62619]: _type = "Task" [ 1782.347299] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.357482] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778296, 'name': Destroy_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.463055] env[62619]: DEBUG nova.network.neutron [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Updating instance_info_cache with network_info: [{"id": "89e91bb9-2bd5-4385-b3dd-cee4612bb166", "address": "fa:16:3e:78:49:c7", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e91bb9-2b", "ovs_interfaceid": "89e91bb9-2bd5-4385-b3dd-cee4612bb166", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.529812] env[62619]: DEBUG nova.compute.utils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1782.531458] env[62619]: DEBUG nova.compute.manager [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1782.532023] env[62619]: DEBUG nova.network.neutron [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1782.593992] env[62619]: DEBUG nova.policy [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1cbe4a8a89e44105969767acfcf8764d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bb5d393c514d41f78fd4ea45d2f888a8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1782.614310] env[62619]: DEBUG nova.objects.instance [None req-620838d3-d89b-4d1c-b38b-4374ad10f4da tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Lazy-loading 'flavor' on Instance uuid 32aed8cd-1583-4253-bfb6-a98610e2f32e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1782.649176] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 7ee5f09f-e27b-4373-88ce-8cff2f55a2b9] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1782.729371] env[62619]: DEBUG nova.network.neutron [req-3eaad369-95e4-4a33-8178-8adbd8620db5 req-cc8f2740-ea6b-4a0c-a823-5955a78afbd2 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Updated VIF entry in instance network info cache for port 2924458a-bf48-482f-ab31-ad34e83a94d4. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1782.730015] env[62619]: DEBUG nova.network.neutron [req-3eaad369-95e4-4a33-8178-8adbd8620db5 req-cc8f2740-ea6b-4a0c-a823-5955a78afbd2 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Updating instance_info_cache with network_info: [{"id": "2924458a-bf48-482f-ab31-ad34e83a94d4", "address": "fa:16:3e:c5:5c:af", "network": {"id": "c853257c-5523-4c7c-ac39-b96dd377e1fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1460025320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767e455ac6ef43d1b587e3e953ed8a9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2924458a-bf", "ovs_interfaceid": "2924458a-bf48-482f-ab31-ad34e83a94d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.766290] env[62619]: DEBUG nova.compute.manager [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1782.766290] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1782.766528] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fda9e9-1bb6-45f3-9df9-cd03d83c24f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.784377] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1782.784713] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e2be562-3cfe-4b2d-ad10-8059727acda5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.791523] env[62619]: DEBUG oslo_vmware.api [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1782.791523] env[62619]: value = "task-1778297" [ 1782.791523] env[62619]: _type = "Task" [ 1782.791523] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.802470] env[62619]: DEBUG oslo_vmware.api [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778297, 'name': PowerOffVM_Task} progress is 0%. 
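The PowerOffVM_Task sequence just above follows the usual oslo.vmware task pattern: invoke the vCenter method, receive a Task managed object, then poll it until it reaches a terminal state. A minimal sketch of that polling loop, assuming a hypothetical get_task_info() callable that exposes state, progress and error fields; this illustrates the pattern, it is not oslo.vmware's actual wait_for_task implementation:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # get_task_info() is assumed to return an object with .state
        # ('queued', 'running', 'success', 'error'), .progress and .error
        while True:
            info = get_task_info()
            if info.state == 'success':
                return getattr(info, 'result', None)
            if info.state == 'error':
                raise RuntimeError(info.error)
            # still queued/running: report progress and poll again,
            # mirroring the "progress is 0%" entries above
            print('progress is %s%%' % (getattr(info, 'progress', 0) or 0))
            time.sleep(poll_interval)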
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.855727] env[62619]: DEBUG nova.compute.manager [req-eb3b29f5-295c-427d-86b8-9ce1105efe41 req-f3002607-6df6-42a8-848d-42659b5071de service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Received event network-vif-plugged-89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1782.856190] env[62619]: DEBUG oslo_concurrency.lockutils [req-eb3b29f5-295c-427d-86b8-9ce1105efe41 req-f3002607-6df6-42a8-848d-42659b5071de service nova] Acquiring lock "5cf7ca57-351f-48ab-8758-b30f50cd607f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.856466] env[62619]: DEBUG oslo_concurrency.lockutils [req-eb3b29f5-295c-427d-86b8-9ce1105efe41 req-f3002607-6df6-42a8-848d-42659b5071de service nova] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.856678] env[62619]: DEBUG oslo_concurrency.lockutils [req-eb3b29f5-295c-427d-86b8-9ce1105efe41 req-f3002607-6df6-42a8-848d-42659b5071de service nova] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.856951] env[62619]: DEBUG nova.compute.manager [req-eb3b29f5-295c-427d-86b8-9ce1105efe41 req-f3002607-6df6-42a8-848d-42659b5071de service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] No waiting events found dispatching network-vif-plugged-89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1782.857201] env[62619]: WARNING nova.compute.manager [req-eb3b29f5-295c-427d-86b8-9ce1105efe41 req-f3002607-6df6-42a8-848d-42659b5071de service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Received unexpected event network-vif-plugged-89e91bb9-2bd5-4385-b3dd-cee4612bb166 for instance with vm_state shelved_offloaded and task_state spawning. [ 1782.857480] env[62619]: DEBUG nova.compute.manager [req-eb3b29f5-295c-427d-86b8-9ce1105efe41 req-f3002607-6df6-42a8-848d-42659b5071de service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Received event network-changed-89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1782.857727] env[62619]: DEBUG nova.compute.manager [req-eb3b29f5-295c-427d-86b8-9ce1105efe41 req-f3002607-6df6-42a8-848d-42659b5071de service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Refreshing instance network info cache due to event network-changed-89e91bb9-2bd5-4385-b3dd-cee4612bb166. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1782.857938] env[62619]: DEBUG oslo_concurrency.lockutils [req-eb3b29f5-295c-427d-86b8-9ce1105efe41 req-f3002607-6df6-42a8-848d-42659b5071de service nova] Acquiring lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1782.865765] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778296, 'name': Destroy_Task, 'duration_secs': 0.417826} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.866461] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Destroyed the VM [ 1782.866794] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1782.867148] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-318e7963-f067-4888-b3cf-de1e9929d76c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.874969] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1782.874969] env[62619]: value = "task-1778298" [ 1782.874969] env[62619]: _type = "Task" [ 1782.874969] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.884295] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778298, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.903138] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.903138] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.903138] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.903424] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.903623] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.905949] env[62619]: INFO nova.compute.manager [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Terminating instance [ 1782.921308] env[62619]: DEBUG nova.network.neutron [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Successfully created port: 165f6c4a-b24e-4c32-845f-891bf7478563 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1782.966984] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1782.969672] env[62619]: DEBUG oslo_concurrency.lockutils 
[req-eb3b29f5-295c-427d-86b8-9ce1105efe41 req-f3002607-6df6-42a8-848d-42659b5071de service nova] Acquired lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1782.969927] env[62619]: DEBUG nova.network.neutron [req-eb3b29f5-295c-427d-86b8-9ce1105efe41 req-f3002607-6df6-42a8-848d-42659b5071de service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Refreshing network info cache for port 89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1782.976394] env[62619]: DEBUG oslo_concurrency.lockutils [None req-815037f0-90ce-4083-8b40-112f8d0a95ae tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "11869077-b428-413f-9f8f-7eac08d2d9ec" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.315s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.004784] env[62619]: DEBUG nova.virt.hardware [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='5d8d0823d14107ed3536e602b2cea7b7',container_format='bare',created_at=2024-12-11T22:55:13Z,direct_url=,disk_format='vmdk',id=d1791572-abf0-49e9-9ccd-ae11e1d9d561,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1206145745-shelved',owner='0a8f5f9386ba4dfa869c288a30aaeada',properties=ImageMetaProps,protected=,size=31667200,status='active',tags=,updated_at=2024-12-11T22:55:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1783.005158] env[62619]: DEBUG nova.virt.hardware [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1783.005287] env[62619]: DEBUG nova.virt.hardware [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1783.005489] env[62619]: DEBUG nova.virt.hardware [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1783.005662] env[62619]: DEBUG nova.virt.hardware [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1783.005817] env[62619]: DEBUG nova.virt.hardware [None req-d077410f-0715-4d22-ac4b-68f771482efc 
tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1783.007383] env[62619]: DEBUG nova.virt.hardware [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1783.007658] env[62619]: DEBUG nova.virt.hardware [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1783.007884] env[62619]: DEBUG nova.virt.hardware [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1783.008168] env[62619]: DEBUG nova.virt.hardware [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1783.008372] env[62619]: DEBUG nova.virt.hardware [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1783.009762] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00474a0-f38c-4c25-9ea0-a3f82e48a2d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.021961] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6361dcf-2d15-430a-a291-ce5f402767ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.040603] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:49:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89e91bb9-2bd5-4385-b3dd-cee4612bb166', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1783.049849] env[62619]: DEBUG oslo.service.loopingcall [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1783.051040] env[62619]: DEBUG nova.compute.manager [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1783.053813] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1783.054355] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd06f45d-4873-4d6d-ad8f-a751b3c5d1f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.079902] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1783.079902] env[62619]: value = "task-1778299" [ 1783.079902] env[62619]: _type = "Task" [ 1783.079902] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.090634] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778299, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.119747] env[62619]: DEBUG oslo_concurrency.lockutils [None req-620838d3-d89b-4d1c-b38b-4374ad10f4da tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquiring lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.153194] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 769905db-d19a-411f-bb5d-8196056b82aa] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1783.233371] env[62619]: DEBUG oslo_concurrency.lockutils [req-3eaad369-95e4-4a33-8178-8adbd8620db5 req-cc8f2740-ea6b-4a0c-a823-5955a78afbd2 service nova] Releasing lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.234241] env[62619]: DEBUG oslo_concurrency.lockutils [None req-620838d3-d89b-4d1c-b38b-4374ad10f4da tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquired lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.305124] env[62619]: DEBUG oslo_vmware.api [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778297, 'name': PowerOffVM_Task, 'duration_secs': 0.262689} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.309815] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1783.309815] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1783.309815] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82dd2c2c-5acb-40c1-a701-a582130dbe0b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.363645] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8cfd7d-d5a1-48ef-ab21-e0bb51501f84 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.372445] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4a4e1c-73c4-43b4-8ae3-ae58a4a2481c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.385826] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778298, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.414917] env[62619]: DEBUG nova.compute.manager [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1783.415224] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1783.416359] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18c3d1f-a2e4-4f88-82f1-35a988cb3df0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.419796] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0d573e-592c-4e5e-b1e2-cdbec1e8ed63 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.430554] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1783.430902] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf26ced6-aa0f-4f59-9b83-763addfa9b7a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.433857] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebbf37fe-bf4a-4f6a-9387-09e3b38ee8e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.441667] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1783.441933] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1783.442136] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleting the datastore file [datastore1] 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1783.442399] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b74caec-f450-427d-8cdf-fee9ce1de1b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.472925] env[62619]: DEBUG nova.compute.provider_tree [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1783.479978] env[62619]: DEBUG oslo_vmware.api [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1783.479978] env[62619]: value = "task-1778301" [ 1783.479978] env[62619]: _type = "Task" [ 1783.479978] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.485973] env[62619]: DEBUG oslo_vmware.api [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1783.485973] env[62619]: value = "task-1778302" [ 1783.485973] env[62619]: _type = "Task" [ 1783.485973] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.494093] env[62619]: DEBUG oslo_vmware.api [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778301, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.500455] env[62619]: DEBUG oslo_vmware.api [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778302, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.590743] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778299, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.659835] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 0f925028-c376-438f-8a56-deaa23047199] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1783.708224] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "11869077-b428-413f-9f8f-7eac08d2d9ec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.709827] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "11869077-b428-413f-9f8f-7eac08d2d9ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.709827] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "11869077-b428-413f-9f8f-7eac08d2d9ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.709827] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "11869077-b428-413f-9f8f-7eac08d2d9ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.709827] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "11869077-b428-413f-9f8f-7eac08d2d9ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.711204] env[62619]: INFO nova.compute.manager [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Terminating instance [ 1783.862530] env[62619]: DEBUG nova.network.neutron [req-eb3b29f5-295c-427d-86b8-9ce1105efe41 req-f3002607-6df6-42a8-848d-42659b5071de service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Updated VIF entry in instance network info cache for port 89e91bb9-2bd5-4385-b3dd-cee4612bb166. 
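The terminate_instance entries above show the usual oslo.concurrency serialization: one named lock per instance UUID for the terminate itself, plus a short-lived "<uuid>-events" lock while queued external events are cleared. A minimal sketch of that pattern with lockutils.lock(); the lock names are copied from the log and the body is only a placeholder:

    from oslo_concurrency import lockutils

    INSTANCE_UUID = '11869077-b428-413f-9f8f-7eac08d2d9ec'  # from the entries above

    def do_terminate_instance():
        # "Acquiring lock ... by do_terminate_instance" / "released ... held N s"
        with lockutils.lock(INSTANCE_UUID):
            with lockutils.lock(INSTANCE_UUID + '-events'):
                pass  # the real code clears pending instance events here
            # ... then proceeds with the shutdown/destroy path seen in the log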
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1783.862962] env[62619]: DEBUG nova.network.neutron [req-eb3b29f5-295c-427d-86b8-9ce1105efe41 req-f3002607-6df6-42a8-848d-42659b5071de service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Updating instance_info_cache with network_info: [{"id": "89e91bb9-2bd5-4385-b3dd-cee4612bb166", "address": "fa:16:3e:78:49:c7", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e91bb9-2b", "ovs_interfaceid": "89e91bb9-2bd5-4385-b3dd-cee4612bb166", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1783.887069] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778298, 'name': RemoveSnapshot_Task, 'duration_secs': 0.681525} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.887337] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1783.887624] env[62619]: DEBUG nova.compute.manager [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1783.888433] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9c78ee-8e78-4bdc-af5d-4554519dbf42 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.960597] env[62619]: DEBUG nova.network.neutron [None req-620838d3-d89b-4d1c-b38b-4374ad10f4da tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1783.980877] env[62619]: DEBUG nova.scheduler.client.report [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1784.003436] env[62619]: DEBUG oslo_vmware.api [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778301, 'name': PowerOffVM_Task, 'duration_secs': 0.440027} completed successfully. 
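The inventory record above reports, per resource class, total, reserved and allocation_ratio. Usable capacity in Placement is commonly taken as (total - reserved) * allocation_ratio; a sketch of that arithmetic applied to the values logged above (any rounding Placement itself applies is not reproduced here):

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }  # trimmed copy of the inventory data logged above

    def usable(inv):
        # usable capacity per resource class: (total - reserved) * allocation_ratio
        return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inv.items()}

    # usable(inventory) -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}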
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.006631] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1784.006811] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1784.007478] env[62619]: DEBUG oslo_vmware.api [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778302, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.427788} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.007688] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c9ff0d3-95f2-4246-953c-5567fd6b5c80 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.011996] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1784.011996] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1784.011996] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1784.011996] env[62619]: INFO nova.compute.manager [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1784.011996] env[62619]: DEBUG oslo.service.loopingcall [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
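The "Waiting for function ... _deallocate_network_with_retries to return" entries come from an oslo.service looping-call helper that re-invokes a function until it signals completion. A small, illustrative use of FixedIntervalLoopingCall with a stand-in callable; the specific helper and retry policy Nova uses at this point may differ:

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_with_retries():
        # stand-in for the retried operation; signal completion on "success"
        attempts['n'] += 1
        if attempts['n'] >= 3:
            raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=0.1).wait()  # blocks until LoopingCallDone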
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1784.011996] env[62619]: DEBUG nova.compute.manager [-] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1784.011996] env[62619]: DEBUG nova.network.neutron [-] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1784.062287] env[62619]: DEBUG nova.compute.manager [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1784.097124] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778299, 'name': CreateVM_Task, 'duration_secs': 0.821888} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.098316] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1784.098609] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1784.098792] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1784.098963] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Deleting the datastore file [datastore1] 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1784.101360] env[62619]: DEBUG nova.virt.hardware [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False 
{{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1784.101651] env[62619]: DEBUG nova.virt.hardware [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1784.101811] env[62619]: DEBUG nova.virt.hardware [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1784.101990] env[62619]: DEBUG nova.virt.hardware [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1784.103156] env[62619]: DEBUG nova.virt.hardware [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1784.103156] env[62619]: DEBUG nova.virt.hardware [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1784.103156] env[62619]: DEBUG nova.virt.hardware [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1784.103322] env[62619]: DEBUG nova.virt.hardware [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1784.103705] env[62619]: DEBUG nova.virt.hardware [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1784.103915] env[62619]: DEBUG nova.virt.hardware [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1784.104338] env[62619]: DEBUG nova.virt.hardware [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:581}} [ 1784.105583] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1791572-abf0-49e9-9ccd-ae11e1d9d561" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.105583] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1791572-abf0-49e9-9ccd-ae11e1d9d561" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.105696] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1791572-abf0-49e9-9ccd-ae11e1d9d561" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1784.105918] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c73be28b-5446-4690-b9c7-dc4ba77372d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.108576] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b7e97e-5817-4ed7-af55-89f376951002 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.111366] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de30416d-03cf-4e99-a7fb-03e4191f10e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.116863] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1784.116863] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5211c76d-c040-1bbe-1930-ae6bec7e6575" [ 1784.116863] env[62619]: _type = "Task" [ 1784.116863] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.124202] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75db64e4-89dd-4518-be78-10ebddf842fc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.129567] env[62619]: DEBUG oslo_vmware.api [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for the task: (returnval){ [ 1784.129567] env[62619]: value = "task-1778304" [ 1784.129567] env[62619]: _type = "Task" [ 1784.129567] env[62619]: } to complete. 
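The nova.virt.hardware entries a little above ("Build topologies for 1 vcpu(s) 1:1:1" ... "Got 1 possible topologies") enumerate candidate guest CPU layouts for the flavor; with 1 vCPU and no explicit limits only 1:1:1 survives. An illustrative enumeration of (sockets, cores, threads) factorizations under the logged maxima; this mirrors the idea, not Nova's exact algorithm:

    from collections import namedtuple

    Topology = namedtuple('Topology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # every (sockets, cores, threads) whose product equals the vCPU count
        # and respects the per-dimension maxima
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append(Topology(s, c, t))
        return found

    # possible_topologies(1) -> [Topology(sockets=1, cores=1, threads=1)],
    # matching "Got 1 possible topologies" in the entries above.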
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.153670] env[62619]: DEBUG nova.compute.manager [req-f988878b-cca7-48d8-bbaf-6978e0afd75f req-22b5f1fb-a2a7-4fac-b855-da826e894844 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Received event network-changed-2924458a-bf48-482f-ab31-ad34e83a94d4 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1784.153866] env[62619]: DEBUG nova.compute.manager [req-f988878b-cca7-48d8-bbaf-6978e0afd75f req-22b5f1fb-a2a7-4fac-b855-da826e894844 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Refreshing instance network info cache due to event network-changed-2924458a-bf48-482f-ab31-ad34e83a94d4. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1784.154068] env[62619]: DEBUG oslo_concurrency.lockutils [req-f988878b-cca7-48d8-bbaf-6978e0afd75f req-22b5f1fb-a2a7-4fac-b855-da826e894844 service nova] Acquiring lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.155688] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1791572-abf0-49e9-9ccd-ae11e1d9d561" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.155913] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Processing image d1791572-abf0-49e9-9ccd-ae11e1d9d561 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1784.156165] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1791572-abf0-49e9-9ccd-ae11e1d9d561/d1791572-abf0-49e9-9ccd-ae11e1d9d561.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.156293] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1791572-abf0-49e9-9ccd-ae11e1d9d561/d1791572-abf0-49e9-9ccd-ae11e1d9d561.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.156466] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1784.156909] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88fbe918-dded-4135-b222-5721505359db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.162559] env[62619]: DEBUG 
oslo_vmware.api [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778304, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.164386] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: a5e4c524-7cc8-4981-899e-1a7c80fac2bd] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1784.171760] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1784.171760] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1784.172646] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66d18c44-7626-4303-91f1-9d202d64b80b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.181354] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1784.181354] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5204277e-d634-1249-a115-0a9441f01aff" [ 1784.181354] env[62619]: _type = "Task" [ 1784.181354] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.190589] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5204277e-d634-1249-a115-0a9441f01aff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.215349] env[62619]: DEBUG nova.compute.manager [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1784.215712] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1784.217033] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36caeac8-06f4-4440-9de4-e6f5b527d3c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.228463] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1784.228852] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92deb804-b4c7-42bb-9e18-41580b6ed2cd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.237600] env[62619]: DEBUG oslo_vmware.api [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1784.237600] env[62619]: value = "task-1778305" [ 1784.237600] env[62619]: _type = "Task" [ 1784.237600] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.251961] env[62619]: DEBUG oslo_vmware.api [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778305, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.365915] env[62619]: DEBUG oslo_concurrency.lockutils [req-eb3b29f5-295c-427d-86b8-9ce1105efe41 req-f3002607-6df6-42a8-848d-42659b5071de service nova] Releasing lock "refresh_cache-5cf7ca57-351f-48ab-8758-b30f50cd607f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.401890] env[62619]: INFO nova.compute.manager [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Shelve offloading [ 1784.489103] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.469s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.490267] env[62619]: DEBUG nova.compute.manager [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1784.493755] env[62619]: DEBUG oslo_concurrency.lockutils [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.989s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.494522] env[62619]: DEBUG nova.objects.instance [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lazy-loading 'resources' on Instance uuid 7cb51b51-514d-4223-a82a-5cdbdab9482a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1784.537111] env[62619]: DEBUG nova.network.neutron [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Successfully updated port: 165f6c4a-b24e-4c32-845f-891bf7478563 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1784.640882] env[62619]: DEBUG oslo_vmware.api [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Task: {'id': task-1778304, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.371951} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.642029] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1784.642029] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1784.642029] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1784.642029] env[62619]: INFO nova.compute.manager [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1784.642029] env[62619]: DEBUG oslo.service.loopingcall [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1784.642282] env[62619]: DEBUG nova.compute.manager [-] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1784.642282] env[62619]: DEBUG nova.network.neutron [-] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1784.667643] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: ec56c824-5f9a-47bf-bcd6-e456ddaad2f2] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1784.693673] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Preparing fetch location {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1784.693983] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Fetch image to [datastore1] OSTACK_IMG_81b2d0ae-c28a-450e-a337-0a628d726aa9/OSTACK_IMG_81b2d0ae-c28a-450e-a337-0a628d726aa9.vmdk {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1784.694191] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Downloading stream optimized image d1791572-abf0-49e9-9ccd-ae11e1d9d561 to [datastore1] OSTACK_IMG_81b2d0ae-c28a-450e-a337-0a628d726aa9/OSTACK_IMG_81b2d0ae-c28a-450e-a337-0a628d726aa9.vmdk on the data store datastore1 as vApp {{(pid=62619) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1784.694433] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Downloading image file data d1791572-abf0-49e9-9ccd-ae11e1d9d561 to the ESX as VM named 'OSTACK_IMG_81b2d0ae-c28a-450e-a337-0a628d726aa9' {{(pid=62619) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1784.747905] env[62619]: DEBUG oslo_vmware.api [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778305, 'name': PowerOffVM_Task, 'duration_secs': 0.28316} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.748148] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1784.748323] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1784.748567] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-396da87a-3435-4c77-8669-861a77f0efe1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.779120] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1784.779120] env[62619]: value = "resgroup-9" [ 1784.779120] env[62619]: _type = "ResourcePool" [ 1784.779120] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1784.779492] env[62619]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-545fb0a2-d189-4860-9d8b-dd5cd5ec3c26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.808985] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lease: (returnval){ [ 1784.808985] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bc3ae8-fa16-f726-1bab-bc35f9d1dc27" [ 1784.808985] env[62619]: _type = "HttpNfcLease" [ 1784.808985] env[62619]: } obtained for vApp import into resource pool (val){ [ 1784.808985] env[62619]: value = "resgroup-9" [ 1784.808985] env[62619]: _type = "ResourcePool" [ 1784.808985] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1784.809313] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the lease: (returnval){ [ 1784.809313] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bc3ae8-fa16-f726-1bab-bc35f9d1dc27" [ 1784.809313] env[62619]: _type = "HttpNfcLease" [ 1784.809313] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1784.821290] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1784.821290] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bc3ae8-fa16-f726-1bab-bc35f9d1dc27" [ 1784.821290] env[62619]: _type = "HttpNfcLease" [ 1784.821290] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1784.869801] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1784.869801] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1784.869801] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleting the datastore file [datastore1] 11869077-b428-413f-9f8f-7eac08d2d9ec {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1784.869801] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b27fb7f-98df-48e3-80e7-83603b9636d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.878515] env[62619]: DEBUG nova.network.neutron [-] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.880609] env[62619]: DEBUG oslo_vmware.api [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1784.880609] env[62619]: value = "task-1778308" [ 1784.880609] env[62619]: _type = "Task" [ 1784.880609] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.885638] env[62619]: DEBUG nova.network.neutron [None req-620838d3-d89b-4d1c-b38b-4374ad10f4da tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Updating instance_info_cache with network_info: [{"id": "2924458a-bf48-482f-ab31-ad34e83a94d4", "address": "fa:16:3e:c5:5c:af", "network": {"id": "c853257c-5523-4c7c-ac39-b96dd377e1fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1460025320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767e455ac6ef43d1b587e3e953ed8a9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2924458a-bf", "ovs_interfaceid": "2924458a-bf48-482f-ab31-ad34e83a94d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.894715] env[62619]: DEBUG oslo_vmware.api [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778308, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.905338] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1784.905872] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3258d888-d1b3-4b2d-8006-9efed626543c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.915511] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1784.915511] env[62619]: value = "task-1778309" [ 1784.915511] env[62619]: _type = "Task" [ 1784.915511] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.928187] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1784.928438] env[62619]: DEBUG nova.compute.manager [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1784.929242] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f54d516-ebb9-4d10-8875-aec70a9f908b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.936212] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "refresh_cache-88f79718-97d0-432c-8515-b60ab3dfd7e0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.936441] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "refresh_cache-88f79718-97d0-432c-8515-b60ab3dfd7e0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.936606] env[62619]: DEBUG nova.network.neutron [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1784.996466] env[62619]: DEBUG nova.compute.utils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1785.000522] env[62619]: DEBUG nova.compute.manager [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1785.000634] env[62619]: DEBUG nova.network.neutron [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1785.042310] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1785.042464] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1785.042624] env[62619]: DEBUG nova.network.neutron [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1785.059864] env[62619]: DEBUG nova.policy [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25159d73422b45dbbe4bab2b2a835055', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df50ba9d97ac4c059077c87f9cfdb719', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1785.170918] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: a6ba8114-0261-4894-98c0-9e0360f6d256] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1785.215439] env[62619]: DEBUG nova.compute.manager [req-852947c2-a333-4b04-bfc7-a34c9d7ba9af req-84c1f761-5d1e-4d4e-99aa-73af66be6b09 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Received event network-vif-plugged-165f6c4a-b24e-4c32-845f-891bf7478563 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1785.215620] env[62619]: DEBUG oslo_concurrency.lockutils [req-852947c2-a333-4b04-bfc7-a34c9d7ba9af req-84c1f761-5d1e-4d4e-99aa-73af66be6b09 service nova] Acquiring lock "52b29fef-eab6-4541-a570-af9c0c021a75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.215845] env[62619]: DEBUG oslo_concurrency.lockutils [req-852947c2-a333-4b04-bfc7-a34c9d7ba9af req-84c1f761-5d1e-4d4e-99aa-73af66be6b09 service 
nova] Lock "52b29fef-eab6-4541-a570-af9c0c021a75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.216476] env[62619]: DEBUG oslo_concurrency.lockutils [req-852947c2-a333-4b04-bfc7-a34c9d7ba9af req-84c1f761-5d1e-4d4e-99aa-73af66be6b09 service nova] Lock "52b29fef-eab6-4541-a570-af9c0c021a75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.216476] env[62619]: DEBUG nova.compute.manager [req-852947c2-a333-4b04-bfc7-a34c9d7ba9af req-84c1f761-5d1e-4d4e-99aa-73af66be6b09 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] No waiting events found dispatching network-vif-plugged-165f6c4a-b24e-4c32-845f-891bf7478563 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1785.216476] env[62619]: WARNING nova.compute.manager [req-852947c2-a333-4b04-bfc7-a34c9d7ba9af req-84c1f761-5d1e-4d4e-99aa-73af66be6b09 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Received unexpected event network-vif-plugged-165f6c4a-b24e-4c32-845f-891bf7478563 for instance with vm_state building and task_state spawning. [ 1785.216652] env[62619]: DEBUG nova.compute.manager [req-852947c2-a333-4b04-bfc7-a34c9d7ba9af req-84c1f761-5d1e-4d4e-99aa-73af66be6b09 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Received event network-changed-165f6c4a-b24e-4c32-845f-891bf7478563 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1785.216682] env[62619]: DEBUG nova.compute.manager [req-852947c2-a333-4b04-bfc7-a34c9d7ba9af req-84c1f761-5d1e-4d4e-99aa-73af66be6b09 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Refreshing instance network info cache due to event network-changed-165f6c4a-b24e-4c32-845f-891bf7478563. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1785.216821] env[62619]: DEBUG oslo_concurrency.lockutils [req-852947c2-a333-4b04-bfc7-a34c9d7ba9af req-84c1f761-5d1e-4d4e-99aa-73af66be6b09 service nova] Acquiring lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1785.254941] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d094b9-ada7-4728-aa51-eb9f58cebc4d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.263662] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674844fa-0094-4b41-b47c-97aee590559c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.300443] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c0525c-7008-4987-a9d8-3198d6ea5c72 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.309577] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b26227e-023c-40fa-8ae2-edfc9bd865ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.321024] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1785.321024] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bc3ae8-fa16-f726-1bab-bc35f9d1dc27" [ 1785.321024] env[62619]: _type = "HttpNfcLease" [ 1785.321024] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1785.329576] env[62619]: DEBUG nova.compute.provider_tree [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1785.381121] env[62619]: INFO nova.compute.manager [-] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Took 1.37 seconds to deallocate network for instance. 
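The `wait_for_task` / `_poll_task` entries above (task-1778304, task-1778305, the SearchDatastore and HttpNfcLease polls) all reflect the same pattern: submit a vCenter task, then poll its state on an interval until it reports success or error, logging progress along the way. The following is a minimal, illustrative sketch of that polling loop only; it is not the oslo.vmware implementation, and the `get_task_info` callable and its returned dict shape are assumptions made for the sketch.

```python
# Minimal sketch of the poll-until-done pattern seen in the wait_for_task /
# _poll_task log entries. NOT oslo.vmware code; get_task_info is a
# hypothetical callable returning e.g. {'state': 'running', 'progress': 0}.
import time

def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    """Poll a vCenter-style task until it finishes, logging progress."""
    while True:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            return info                      # "completed successfully" entries
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # "Task: {...} progress is 0%." lines correspond to these polls
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
```

The `duration_secs` values recorded when a task completes (e.g. 0.371951 for the DeleteDatastoreFile task) are simply the elapsed time across these poll iterations.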
[ 1785.381398] env[62619]: DEBUG nova.network.neutron [-] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.394353] env[62619]: DEBUG oslo_concurrency.lockutils [None req-620838d3-d89b-4d1c-b38b-4374ad10f4da tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Releasing lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.394353] env[62619]: DEBUG nova.compute.manager [None req-620838d3-d89b-4d1c-b38b-4374ad10f4da tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Inject network info {{(pid=62619) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7600}} [ 1785.394353] env[62619]: DEBUG nova.compute.manager [None req-620838d3-d89b-4d1c-b38b-4374ad10f4da tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] network_info to inject: |[{"id": "2924458a-bf48-482f-ab31-ad34e83a94d4", "address": "fa:16:3e:c5:5c:af", "network": {"id": "c853257c-5523-4c7c-ac39-b96dd377e1fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1460025320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767e455ac6ef43d1b587e3e953ed8a9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2924458a-bf", "ovs_interfaceid": "2924458a-bf48-482f-ab31-ad34e83a94d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7601}} [ 1785.397203] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-620838d3-d89b-4d1c-b38b-4374ad10f4da tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Reconfiguring VM instance to set the machine id {{(pid=62619) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1785.398521] env[62619]: DEBUG oslo_concurrency.lockutils [req-f988878b-cca7-48d8-bbaf-6978e0afd75f req-22b5f1fb-a2a7-4fac-b855-da826e894844 service nova] Acquired lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1785.398718] env[62619]: DEBUG nova.network.neutron [req-f988878b-cca7-48d8-bbaf-6978e0afd75f req-22b5f1fb-a2a7-4fac-b855-da826e894844 service nova] [instance: 
32aed8cd-1583-4253-bfb6-a98610e2f32e] Refreshing network info cache for port 2924458a-bf48-482f-ab31-ad34e83a94d4 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1785.400407] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58cba74c-4dfa-4f6c-8671-d4667dc4ad13 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.430919] env[62619]: DEBUG oslo_vmware.api [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778308, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163196} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.430919] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1785.430919] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1785.430919] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1785.430919] env[62619]: INFO nova.compute.manager [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1785.430919] env[62619]: DEBUG oslo.service.loopingcall [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1785.430919] env[62619]: DEBUG oslo_vmware.api [None req-620838d3-d89b-4d1c-b38b-4374ad10f4da tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Waiting for the task: (returnval){ [ 1785.430919] env[62619]: value = "task-1778310" [ 1785.430919] env[62619]: _type = "Task" [ 1785.430919] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.430919] env[62619]: DEBUG nova.compute.manager [-] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1785.430919] env[62619]: DEBUG nova.network.neutron [-] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1785.446856] env[62619]: DEBUG oslo_vmware.api [None req-620838d3-d89b-4d1c-b38b-4374ad10f4da tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778310, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.453182] env[62619]: DEBUG nova.network.neutron [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Successfully created port: d1c7728e-1484-4294-ab32-b78e9572ada7 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1785.503678] env[62619]: DEBUG nova.compute.manager [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1785.576603] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "cee0356c-25d7-48ca-be09-16b0e1b56a41" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.576875] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "cee0356c-25d7-48ca-be09-16b0e1b56a41" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.598145] env[62619]: DEBUG nova.network.neutron [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1785.674746] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 4b2e9965-cbd4-4d98-b003-436b4a8c913e] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1785.824436] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1785.824436] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bc3ae8-fa16-f726-1bab-bc35f9d1dc27" [ 1785.824436] env[62619]: _type = "HttpNfcLease" [ 1785.824436] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1785.824866] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1785.824866] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bc3ae8-fa16-f726-1bab-bc35f9d1dc27" [ 1785.824866] env[62619]: _type = "HttpNfcLease" [ 1785.824866] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1785.825692] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0a619c-0375-41d8-ae18-7ce1e4cdf748 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.832564] env[62619]: DEBUG nova.scheduler.client.report [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1785.842972] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c70e5d-8673-37e7-89ca-2a5514ea5406/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1785.844921] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Creating HTTP connection to write to file with size = 31667200 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c70e5d-8673-37e7-89ca-2a5514ea5406/disk-0.vmdk. 
{{(pid=62619) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1785.847630] env[62619]: DEBUG nova.network.neutron [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Updating instance_info_cache with network_info: [{"id": "165f6c4a-b24e-4c32-845f-891bf7478563", "address": "fa:16:3e:ec:67:49", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165f6c4a-b2", "ovs_interfaceid": "165f6c4a-b24e-4c32-845f-891bf7478563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.911808] env[62619]: INFO nova.compute.manager [-] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Took 1.27 seconds to deallocate network for instance. 
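The image-fetch entries around this point trace the stream-optimized download path end to end: an HttpNfcLease is requested via `ResourcePool.ImportVApp`, the lease is polled until it moves from "initializing" to "ready", the target VMDK URL is read from the lease info, and an HTTP write connection of a known size is opened to that URL while `HttpNfcLeaseProgress` calls keep the lease alive. The sketch below shows that lifecycle in outline only; the `session` helper and its method names (`create_import_vapp_lease`, `lease_state`, `device_urls`, `lease_progress`, `complete_lease`, `abort_lease`, `open_write_connection`) are assumptions for illustration, not the oslo.vmware `rw_handles` API.

```python
# Illustrative lease lifecycle behind the ImportVApp / HttpNfcLease entries.
# All `session` helpers here are hypothetical stand-ins for the real calls.
import time

def upload_stream_optimized_image(session, resource_pool, import_spec, data,
                                  chunk=1 << 20):
    lease = session.create_import_vapp_lease(resource_pool, import_spec)
    while session.lease_state(lease) == 'initializing':   # "is initializing."
        time.sleep(1)
    if session.lease_state(lease) != 'ready':             # "is ready."
        raise RuntimeError('HttpNfcLease entered an error state')

    url = session.device_urls(lease)[0]     # "Found VMDK URL ... from lease info."
    conn = session.open_write_connection(url, size=len(data))
    total, written = max(len(data), 1), 0
    try:
        for off in range(0, len(data), chunk):
            conn.write(data[off:off + chunk])
            written += min(chunk, len(data) - off)
            # periodic HttpNfcLeaseProgress keeps the lease from timing out
            session.lease_progress(lease, int(100 * written / total))
        session.complete_lease(lease)
    except Exception:
        session.abort_lease(lease)
        raise
    finally:
        conn.close()
```

In the log, the "write to file with size = 31667200" entry corresponds to opening that connection, and the later lease-progress invocations correspond to the periodic keep-alive calls.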
[ 1785.912865] env[62619]: DEBUG oslo_concurrency.lockutils [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.917284] env[62619]: DEBUG nova.network.neutron [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Updating instance_info_cache with network_info: [{"id": "e6fbdc52-5c2d-4d4f-9c92-77d76129374f", "address": "fa:16:3e:39:18:68", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6fbdc52-5c", "ovs_interfaceid": "e6fbdc52-5c2d-4d4f-9c92-77d76129374f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.928603] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c837b2a4-9730-418f-8f51-6989657aa57d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.949343] env[62619]: DEBUG oslo_vmware.api [None req-620838d3-d89b-4d1c-b38b-4374ad10f4da tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778310, 'name': ReconfigVM_Task, 'duration_secs': 0.195225} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.949859] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-620838d3-d89b-4d1c-b38b-4374ad10f4da tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Reconfigured VM instance to set the machine id {{(pid=62619) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1785.988465] env[62619]: DEBUG oslo_concurrency.lockutils [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Acquiring lock "f3345332-5a22-4a1c-ac74-4e8f2ceb3f15" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.991139] env[62619]: DEBUG oslo_concurrency.lockutils [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Lock "f3345332-5a22-4a1c-ac74-4e8f2ceb3f15" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.991139] env[62619]: DEBUG oslo_concurrency.lockutils [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Acquiring lock "f3345332-5a22-4a1c-ac74-4e8f2ceb3f15-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.991139] env[62619]: DEBUG oslo_concurrency.lockutils [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Lock "f3345332-5a22-4a1c-ac74-4e8f2ceb3f15-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.991139] env[62619]: DEBUG oslo_concurrency.lockutils [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Lock "f3345332-5a22-4a1c-ac74-4e8f2ceb3f15-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.991839] env[62619]: INFO nova.compute.manager [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Terminating instance [ 1786.079453] env[62619]: DEBUG nova.compute.manager [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1786.178427] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: c72bbf2b-e928-41b6-ac9a-0cdd32de8cd3] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1786.345984] env[62619]: DEBUG oslo_concurrency.lockutils [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.349588] env[62619]: DEBUG oslo_concurrency.lockutils [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 8.252s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.351471] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.351588] env[62619]: DEBUG nova.compute.manager [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Instance network_info: |[{"id": "165f6c4a-b24e-4c32-845f-891bf7478563", "address": "fa:16:3e:ec:67:49", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165f6c4a-b2", "ovs_interfaceid": "165f6c4a-b24e-4c32-845f-891bf7478563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1786.352096] env[62619]: DEBUG oslo_concurrency.lockutils [req-852947c2-a333-4b04-bfc7-a34c9d7ba9af req-84c1f761-5d1e-4d4e-99aa-73af66be6b09 service nova] Acquired lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.352341] env[62619]: DEBUG 
nova.network.neutron [req-852947c2-a333-4b04-bfc7-a34c9d7ba9af req-84c1f761-5d1e-4d4e-99aa-73af66be6b09 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Refreshing network info cache for port 165f6c4a-b24e-4c32-845f-891bf7478563 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1786.353408] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:67:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe99da4f-5630-4afd-918b-b327193d8489', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '165f6c4a-b24e-4c32-845f-891bf7478563', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1786.361805] env[62619]: DEBUG oslo.service.loopingcall [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1786.368185] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1786.369164] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ef987e0-03ec-4841-b1ba-08874074c03a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.390531] env[62619]: INFO nova.scheduler.client.report [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleted allocations for instance 7cb51b51-514d-4223-a82a-5cdbdab9482a [ 1786.405310] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1786.405310] env[62619]: value = "task-1778311" [ 1786.405310] env[62619]: _type = "Task" [ 1786.405310] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.420268] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778311, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.425629] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "refresh_cache-88f79718-97d0-432c-8515-b60ab3dfd7e0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.429941] env[62619]: DEBUG nova.network.neutron [req-f988878b-cca7-48d8-bbaf-6978e0afd75f req-22b5f1fb-a2a7-4fac-b855-da826e894844 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Updated VIF entry in instance network info cache for port 2924458a-bf48-482f-ab31-ad34e83a94d4. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1786.430382] env[62619]: DEBUG nova.network.neutron [req-f988878b-cca7-48d8-bbaf-6978e0afd75f req-22b5f1fb-a2a7-4fac-b855-da826e894844 service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Updating instance_info_cache with network_info: [{"id": "2924458a-bf48-482f-ab31-ad34e83a94d4", "address": "fa:16:3e:c5:5c:af", "network": {"id": "c853257c-5523-4c7c-ac39-b96dd377e1fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1460025320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767e455ac6ef43d1b587e3e953ed8a9d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2924458a-bf", "ovs_interfaceid": "2924458a-bf48-482f-ab31-ad34e83a94d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1786.435325] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.496596] env[62619]: DEBUG nova.compute.manager [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1786.496823] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1786.497942] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebc6fb8-dc7d-409f-a02d-ade7bdd4f215 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.506152] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1786.506377] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64eb698f-8443-4e3b-bab6-8510d9aaa5f2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.512258] env[62619]: DEBUG nova.compute.manager [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1786.516194] env[62619]: DEBUG oslo_vmware.api [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Waiting for the task: (returnval){ [ 1786.516194] env[62619]: value = "task-1778312" [ 1786.516194] env[62619]: _type = "Task" [ 1786.516194] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.529795] env[62619]: DEBUG oslo_vmware.api [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778312, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.605607] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.671794] env[62619]: DEBUG nova.network.neutron [-] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1786.681775] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 6cd2f6e6-79a4-41be-a349-b504028ecab4] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1786.724944] env[62619]: DEBUG nova.virt.hardware [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1786.725115] env[62619]: DEBUG nova.virt.hardware [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1786.725292] env[62619]: DEBUG nova.virt.hardware [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1786.725530] env[62619]: DEBUG nova.virt.hardware [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1786.725704] env[62619]: DEBUG nova.virt.hardware [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1786.725854] env[62619]: DEBUG nova.virt.hardware [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 
tempest-ServerDiskConfigTestJSON-1493618009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1786.726656] env[62619]: DEBUG nova.virt.hardware [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1786.727057] env[62619]: DEBUG nova.virt.hardware [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1786.727340] env[62619]: DEBUG nova.virt.hardware [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1786.727536] env[62619]: DEBUG nova.virt.hardware [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1786.727736] env[62619]: DEBUG nova.virt.hardware [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1786.729751] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff0767d-bf7a-4423-91b3-094a36c9a4fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.751666] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed8f943-1e1d-43b9-83c3-7925d8545c1d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.781156] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1786.782500] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c03c26-2f7a-417b-9635-852a4e8920fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.791289] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 
1786.792347] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09fd99ad-8d09-4a40-9533-5e7d2951ce10 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.856204] env[62619]: INFO nova.compute.claims [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1786.904817] env[62619]: DEBUG oslo_concurrency.lockutils [None req-69a83228-dd23-4920-8408-27762a0e5fb3 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "7cb51b51-514d-4223-a82a-5cdbdab9482a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.876s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.922367] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1786.922920] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1786.926018] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleting the datastore file [datastore1] 88f79718-97d0-432c-8515-b60ab3dfd7e0 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1786.926018] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac5e8068-9e52-41a8-bacb-2c63ffe6441c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.933203] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778311, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.937134] env[62619]: DEBUG oslo_concurrency.lockutils [req-f988878b-cca7-48d8-bbaf-6978e0afd75f req-22b5f1fb-a2a7-4fac-b855-da826e894844 service nova] Releasing lock "refresh_cache-32aed8cd-1583-4253-bfb6-a98610e2f32e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.944556] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1786.944556] env[62619]: value = "task-1778314" [ 1786.944556] env[62619]: _type = "Task" [ 1786.944556] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.958098] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778314, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.035119] env[62619]: DEBUG oslo_vmware.api [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778312, 'name': PowerOffVM_Task, 'duration_secs': 0.266124} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.037468] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1787.037831] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1787.038290] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8967fe4-4181-4d23-9ab8-e3544ab4f6b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.137882] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1787.138219] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1787.138461] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Deleting the datastore file [datastore1] f3345332-5a22-4a1c-ac74-4e8f2ceb3f15 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1787.138793] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb317ea1-463b-469c-ba9d-5653a474ea1c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.149409] env[62619]: DEBUG oslo_vmware.api [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Waiting for the task: (returnval){ [ 1787.149409] env[62619]: value = "task-1778316" [ 
1787.149409] env[62619]: _type = "Task" [ 1787.149409] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.160841] env[62619]: DEBUG oslo_vmware.api [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778316, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.162531] env[62619]: DEBUG nova.network.neutron [req-852947c2-a333-4b04-bfc7-a34c9d7ba9af req-84c1f761-5d1e-4d4e-99aa-73af66be6b09 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Updated VIF entry in instance network info cache for port 165f6c4a-b24e-4c32-845f-891bf7478563. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1787.163012] env[62619]: DEBUG nova.network.neutron [req-852947c2-a333-4b04-bfc7-a34c9d7ba9af req-84c1f761-5d1e-4d4e-99aa-73af66be6b09 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Updating instance_info_cache with network_info: [{"id": "165f6c4a-b24e-4c32-845f-891bf7478563", "address": "fa:16:3e:ec:67:49", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165f6c4a-b2", "ovs_interfaceid": "165f6c4a-b24e-4c32-845f-891bf7478563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1787.174636] env[62619]: INFO nova.compute.manager [-] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Took 1.74 seconds to deallocate network for instance. [ 1787.187309] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 5b1008fb-7c0a-4e12-90f8-119a82ea62f1] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1787.321478] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Completed reading data from the image iterator. 
{{(pid=62619) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1787.321478] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c70e5d-8673-37e7-89ca-2a5514ea5406/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1787.321478] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c2dfa5-3858-4f22-b8fb-2c31f2cc509e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.331797] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c70e5d-8673-37e7-89ca-2a5514ea5406/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1787.332471] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c70e5d-8673-37e7-89ca-2a5514ea5406/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1787.332890] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-2a540f3b-3721-45a8-ab69-a8fb9cf18774 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.365550] env[62619]: INFO nova.compute.resource_tracker [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating resource usage from migration 66600ae4-092e-4962-9c0b-51362373240f [ 1787.425020] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778311, 'name': CreateVM_Task, 'duration_secs': 0.680658} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.425020] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1787.425020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.425020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.425020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1787.425020] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2341692-3533-4720-ad4d-d8f3f710fdd4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.434040] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1787.434040] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525f388e-5a08-27cd-0543-716e69f38e92" [ 1787.434040] env[62619]: _type = "Task" [ 1787.434040] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.445505] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525f388e-5a08-27cd-0543-716e69f38e92, 'name': SearchDatastore_Task, 'duration_secs': 0.010166} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.448894] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.449091] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1787.449597] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.449597] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.449703] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1787.449902] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23160186-e0bb-4c64-849d-cd99d0f2117e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.459018] env[62619]: DEBUG oslo_vmware.api [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778314, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.269036} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.460240] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1787.460456] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1787.460636] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1787.463309] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1787.463405] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1787.464798] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40a39901-0bce-4c19-9eaa-8ed750e89bd9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.472413] env[62619]: DEBUG nova.network.neutron [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Successfully updated port: d1c7728e-1484-4294-ab32-b78e9572ada7 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1787.478289] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1787.478289] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52782dd7-0f5d-dae7-664a-4d7df27ce9fb" [ 1787.478289] env[62619]: _type = "Task" [ 1787.478289] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.489658] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52782dd7-0f5d-dae7-664a-4d7df27ce9fb, 'name': SearchDatastore_Task, 'duration_secs': 0.010699} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.494581] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24d1f289-3ffa-41ab-94b4-f02f3e8cccb2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.499880] env[62619]: INFO nova.scheduler.client.report [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleted allocations for instance 88f79718-97d0-432c-8515-b60ab3dfd7e0 [ 1787.506074] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1787.506074] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5216d067-e175-cafc-b2de-d03306f2d535" [ 1787.506074] env[62619]: _type = "Task" [ 1787.506074] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.522075] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5216d067-e175-cafc-b2de-d03306f2d535, 'name': SearchDatastore_Task, 'duration_secs': 0.009603} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.522729] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.523026] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 52b29fef-eab6-4541-a570-af9c0c021a75/52b29fef-eab6-4541-a570-af9c0c021a75.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1787.526085] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f9101ba-3af0-477a-891f-44aa1b01388b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.537314] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1787.537314] env[62619]: value = "task-1778317" [ 1787.537314] env[62619]: _type = "Task" [ 1787.537314] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.539528] env[62619]: DEBUG oslo_vmware.rw_handles [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5217e6db-dbb2-045e-b64b-c7ef164e06ea/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1787.541182] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22cc9cc4-ef01-45a1-bcdf-3b5c2d4fe7a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.551644] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c70e5d-8673-37e7-89ca-2a5514ea5406/disk-0.vmdk. {{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1787.551910] env[62619]: INFO nova.virt.vmwareapi.images [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Downloaded image file data d1791572-abf0-49e9-9ccd-ae11e1d9d561 [ 1787.556016] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f0f776-76cb-4e1e-8c6e-f67610a9b1e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.560943] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778317, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.561219] env[62619]: DEBUG oslo_vmware.rw_handles [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5217e6db-dbb2-045e-b64b-c7ef164e06ea/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1787.561380] env[62619]: ERROR oslo_vmware.rw_handles [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5217e6db-dbb2-045e-b64b-c7ef164e06ea/disk-0.vmdk due to incomplete transfer. 
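The "Waiting for the task: ... to complete" / "Task: {...} progress is N%" / "completed successfully" triplets that recur throughout this section are oslo.vmware's task-polling pattern: the driver submits a vCenter task (PowerOffVM_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task, and so on) and then polls its state at a fixed interval until it reports success or raises on error. A minimal, self-contained sketch of that loop follows; it is illustrative only, not the oslo.vmware implementation, and the names fetch_task_info, poll_interval and timeout are made up for the example:

    import time

    def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a long-running task until it finishes, mirroring the
        'Waiting for the task ... / progress is N% / completed successfully'
        sequence in the log above (schematic sketch, not oslo.vmware code)."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()  # e.g. {'state': 'running', 'progress': 25}
            if info.get('state') == 'success':
                return info  # corresponds to the "completed successfully" log lines
            if info.get('state') == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # Task still running: the per-poll "progress is N%" lines come from here.
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete within %.0fs' % timeout)

    # Usage example: a fake task that succeeds on the third poll.
    _polls = iter([{'state': 'running', 'progress': 0},
                   {'state': 'running', 'progress': 25},
                   {'state': 'success', 'progress': 100}])
    print(wait_for_task(lambda: next(_polls), poll_interval=0.01))

In the real driver the poll interval is configuration-driven (nova's [vmware] task_poll_interval option), which is why the progress lines in this log appear at roughly regular intervals between task submission and completion.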
[ 1787.564359] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-340c6482-15ec-47e2-a602-1084d047d2a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.579782] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-677f2f0d-8da8-47a8-8b4b-cd2bc7806014 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.581576] env[62619]: DEBUG oslo_vmware.rw_handles [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5217e6db-dbb2-045e-b64b-c7ef164e06ea/disk-0.vmdk. {{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1787.581809] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Uploaded image 1b367f3f-dbe9-40fa-82bb-59204b059dc3 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1787.583626] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1787.586460] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d0664871-9970-4c20-8f59-e14e921b723f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.595504] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1787.595504] env[62619]: value = "task-1778318" [ 1787.595504] env[62619]: _type = "Task" [ 1787.595504] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.601753] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquiring lock "32aed8cd-1583-4253-bfb6-a98610e2f32e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.601995] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Lock "32aed8cd-1583-4253-bfb6-a98610e2f32e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.602224] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquiring lock "32aed8cd-1583-4253-bfb6-a98610e2f32e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.602410] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Lock "32aed8cd-1583-4253-bfb6-a98610e2f32e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.602578] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Lock "32aed8cd-1583-4253-bfb6-a98610e2f32e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.605048] env[62619]: INFO nova.compute.manager [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Terminating instance [ 1787.609763] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778318, 'name': Destroy_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.615020] env[62619]: DEBUG nova.compute.manager [req-3d2f9622-da8f-433c-8efe-1266c5af1d1f req-3b59093c-cd16-43bb-8b77-5085ec60380e service nova] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Received event network-vif-deleted-e81bada3-ed22-4c30-a4dd-4c371831520b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1787.620859] env[62619]: INFO nova.virt.vmwareapi.images [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] The imported VM was unregistered [ 1787.623834] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Caching image {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1787.624136] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Creating directory with path [datastore1] devstack-image-cache_base/d1791572-abf0-49e9-9ccd-ae11e1d9d561 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1787.625662] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3374cee2-a008-44ab-a9ad-1ed651e55af4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.650225] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Created directory with path [datastore1] devstack-image-cache_base/d1791572-abf0-49e9-9ccd-ae11e1d9d561 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1787.650444] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_81b2d0ae-c28a-450e-a337-0a628d726aa9/OSTACK_IMG_81b2d0ae-c28a-450e-a337-0a628d726aa9.vmdk to [datastore1] devstack-image-cache_base/d1791572-abf0-49e9-9ccd-ae11e1d9d561/d1791572-abf0-49e9-9ccd-ae11e1d9d561.vmdk. 
{{(pid=62619) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1787.656627] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-a491c838-f04c-451e-9cce-18ad59c2a989 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.666654] env[62619]: DEBUG oslo_concurrency.lockutils [req-852947c2-a333-4b04-bfc7-a34c9d7ba9af req-84c1f761-5d1e-4d4e-99aa-73af66be6b09 service nova] Releasing lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.667132] env[62619]: DEBUG oslo_vmware.api [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Task: {'id': task-1778316, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197816} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.668688] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1787.668925] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1787.669172] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1787.669375] env[62619]: INFO nova.compute.manager [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1787.669621] env[62619]: DEBUG oslo.service.loopingcall [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1787.669906] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1787.669906] env[62619]: value = "task-1778320" [ 1787.669906] env[62619]: _type = "Task" [ 1787.669906] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.670082] env[62619]: DEBUG nova.compute.manager [-] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1787.670184] env[62619]: DEBUG nova.network.neutron [-] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1787.685289] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.685644] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778320, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.691141] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: b1c3c213-599d-4cab-8224-d87467d774c9] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1787.709751] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef94d36e-5e91-4572-b815-f923f3777e1f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.718835] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67d4ead-138f-4cd8-8b97-3616d666b336 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.757475] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99bdaf29-9a01-4c7c-b13f-ed8f1e3a5d66 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.767236] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-154a65d2-1edc-4573-9ffb-a39974d10f72 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.784651] env[62619]: DEBUG nova.compute.provider_tree [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1787.800053] env[62619]: DEBUG nova.compute.manager [req-0084e02c-0efd-4e39-91e3-4b06466946f9 req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Received event network-vif-deleted-a10c5399-b021-4ea7-8a41-4d58136aff12 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1787.800053] env[62619]: DEBUG nova.compute.manager [req-0084e02c-0efd-4e39-91e3-4b06466946f9 
req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Received event network-vif-deleted-6ee41f5a-d2d4-4e4d-b30f-5f747eadbb81 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1787.800053] env[62619]: DEBUG nova.compute.manager [req-0084e02c-0efd-4e39-91e3-4b06466946f9 req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Received event network-vif-unplugged-e6fbdc52-5c2d-4d4f-9c92-77d76129374f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1787.800053] env[62619]: DEBUG oslo_concurrency.lockutils [req-0084e02c-0efd-4e39-91e3-4b06466946f9 req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] Acquiring lock "88f79718-97d0-432c-8515-b60ab3dfd7e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.800053] env[62619]: DEBUG oslo_concurrency.lockutils [req-0084e02c-0efd-4e39-91e3-4b06466946f9 req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] Lock "88f79718-97d0-432c-8515-b60ab3dfd7e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.800053] env[62619]: DEBUG oslo_concurrency.lockutils [req-0084e02c-0efd-4e39-91e3-4b06466946f9 req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] Lock "88f79718-97d0-432c-8515-b60ab3dfd7e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.800053] env[62619]: DEBUG nova.compute.manager [req-0084e02c-0efd-4e39-91e3-4b06466946f9 req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] No waiting events found dispatching network-vif-unplugged-e6fbdc52-5c2d-4d4f-9c92-77d76129374f {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1787.800053] env[62619]: WARNING nova.compute.manager [req-0084e02c-0efd-4e39-91e3-4b06466946f9 req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Received unexpected event network-vif-unplugged-e6fbdc52-5c2d-4d4f-9c92-77d76129374f for instance with vm_state shelved_offloaded and task_state None. [ 1787.800053] env[62619]: DEBUG nova.compute.manager [req-0084e02c-0efd-4e39-91e3-4b06466946f9 req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Received event network-changed-e6fbdc52-5c2d-4d4f-9c92-77d76129374f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1787.800053] env[62619]: DEBUG nova.compute.manager [req-0084e02c-0efd-4e39-91e3-4b06466946f9 req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Refreshing instance network info cache due to event network-changed-e6fbdc52-5c2d-4d4f-9c92-77d76129374f. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1787.800053] env[62619]: DEBUG oslo_concurrency.lockutils [req-0084e02c-0efd-4e39-91e3-4b06466946f9 req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] Acquiring lock "refresh_cache-88f79718-97d0-432c-8515-b60ab3dfd7e0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.800053] env[62619]: DEBUG oslo_concurrency.lockutils [req-0084e02c-0efd-4e39-91e3-4b06466946f9 req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] Acquired lock "refresh_cache-88f79718-97d0-432c-8515-b60ab3dfd7e0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.800053] env[62619]: DEBUG nova.network.neutron [req-0084e02c-0efd-4e39-91e3-4b06466946f9 req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Refreshing network info cache for port e6fbdc52-5c2d-4d4f-9c92-77d76129374f {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1787.850555] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "a250f05d-cd74-436d-b656-2a9e55527809" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.850782] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "a250f05d-cd74-436d-b656-2a9e55527809" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.980719] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "refresh_cache-0987b6ec-2587-4f15-adbb-f563e19ecce9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.980719] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "refresh_cache-0987b6ec-2587-4f15-adbb-f563e19ecce9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.980719] env[62619]: DEBUG nova.network.neutron [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1788.007763] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.052381] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778317, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.111923] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778318, 'name': Destroy_Task, 'duration_secs': 0.403477} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.112117] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Destroyed the VM [ 1788.112431] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1788.112594] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6df4b37c-a39f-4c2b-891c-9cfe62fefaca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.115158] env[62619]: DEBUG nova.compute.manager [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1788.115261] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1788.116114] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9cf531-e8c5-4a47-a1d8-432f16500cd0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.125754] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1788.127459] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2e65e6e-bf3e-488a-bf21-d79ecfa21e94 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.131235] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1788.131235] env[62619]: value = "task-1778321" [ 1788.131235] env[62619]: _type = "Task" [ 1788.131235] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.141530] env[62619]: DEBUG oslo_vmware.api [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Waiting for the task: (returnval){ [ 1788.141530] env[62619]: value = "task-1778322" [ 1788.141530] env[62619]: _type = "Task" [ 1788.141530] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.153306] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778321, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.161266] env[62619]: DEBUG oslo_vmware.api [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778322, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.185973] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778320, 'name': MoveVirtualDisk_Task} progress is 9%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.194975] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: ae37cae9-c82e-4775-8a8f-6bbf9108b0bd] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1788.288675] env[62619]: DEBUG nova.scheduler.client.report [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1788.353176] env[62619]: DEBUG nova.compute.manager [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1788.540388] env[62619]: DEBUG nova.network.neutron [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1788.565156] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778317, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.950036} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.566211] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 52b29fef-eab6-4541-a570-af9c0c021a75/52b29fef-eab6-4541-a570-af9c0c021a75.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1788.567773] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1788.568136] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc8e7fd5-48c0-40a3-a2a8-c0cf5abd329e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.583350] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1788.583350] env[62619]: value = "task-1778323" [ 1788.583350] env[62619]: _type = "Task" [ 1788.583350] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.599868] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778323, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.603199] env[62619]: DEBUG nova.network.neutron [req-0084e02c-0efd-4e39-91e3-4b06466946f9 req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Updated VIF entry in instance network info cache for port e6fbdc52-5c2d-4d4f-9c92-77d76129374f. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1788.603565] env[62619]: DEBUG nova.network.neutron [req-0084e02c-0efd-4e39-91e3-4b06466946f9 req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Updating instance_info_cache with network_info: [{"id": "e6fbdc52-5c2d-4d4f-9c92-77d76129374f", "address": "fa:16:3e:39:18:68", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": null, "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tape6fbdc52-5c", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1788.651226] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778321, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.655369] env[62619]: DEBUG oslo_vmware.api [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778322, 'name': PowerOffVM_Task, 'duration_secs': 0.279756} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.655659] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1788.655822] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1788.656168] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-008fd445-5950-475e-ade4-00fb19038377 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.688060] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778320, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.699009] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: fab3d689-9e30-4afd-b0cc-49c6d2870c50] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1788.794663] env[62619]: DEBUG oslo_concurrency.lockutils [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.445s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.794873] env[62619]: INFO nova.compute.manager [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Migrating [ 1788.807028] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.781s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.808104] env[62619]: DEBUG nova.objects.instance [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'resources' on Instance uuid 4983b333-debb-4a2b-b28d-b321f0d8d7d7 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1788.819518] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1788.820583] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1788.820967] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Deleting the datastore file [datastore1] 32aed8cd-1583-4253-bfb6-a98610e2f32e {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1788.823188] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b63029ed-2899-4509-9b29-6d0e23e373de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.832470] env[62619]: DEBUG oslo_vmware.api [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Waiting for the task: (returnval){ [ 1788.832470] 
env[62619]: value = "task-1778325" [ 1788.832470] env[62619]: _type = "Task" [ 1788.832470] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.845347] env[62619]: DEBUG oslo_vmware.api [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778325, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.864324] env[62619]: DEBUG nova.network.neutron [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Updating instance_info_cache with network_info: [{"id": "d1c7728e-1484-4294-ab32-b78e9572ada7", "address": "fa:16:3e:eb:ab:79", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1c7728e-14", "ovs_interfaceid": "d1c7728e-1484-4294-ab32-b78e9572ada7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1788.876658] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.959430] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "d3aa352b-7d2d-416e-a579-9636619bb025" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.960463] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "d3aa352b-7d2d-416e-a579-9636619bb025" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.019823] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92db66a5-b6cf-4a0d-9cc6-896841eb9560 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "88f79718-97d0-432c-8515-b60ab3dfd7e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.077023] env[62619]: DEBUG nova.network.neutron [-] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1789.099621] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778323, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.106545] env[62619]: DEBUG oslo_concurrency.lockutils [req-0084e02c-0efd-4e39-91e3-4b06466946f9 req-b4795a74-25cf-406f-b90a-6f951129bf86 service nova] Releasing lock "refresh_cache-88f79718-97d0-432c-8515-b60ab3dfd7e0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.154237] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778321, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.185602] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778320, 'name': MoveVirtualDisk_Task} progress is 38%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.202420] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 1f86b805-0fde-4bda-9a94-d440a670e23c] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1789.323041] env[62619]: DEBUG oslo_concurrency.lockutils [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1789.323300] env[62619]: DEBUG oslo_concurrency.lockutils [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.323564] env[62619]: DEBUG nova.network.neutron [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1789.349028] env[62619]: DEBUG oslo_vmware.api [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778325, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.368311] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "refresh_cache-0987b6ec-2587-4f15-adbb-f563e19ecce9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.368311] env[62619]: DEBUG nova.compute.manager [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Instance network_info: |[{"id": "d1c7728e-1484-4294-ab32-b78e9572ada7", "address": "fa:16:3e:eb:ab:79", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1c7728e-14", "ovs_interfaceid": "d1c7728e-1484-4294-ab32-b78e9572ada7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1789.368949] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:ab:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1c7728e-1484-4294-ab32-b78e9572ada7', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1789.376630] env[62619]: DEBUG oslo.service.loopingcall [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1789.377410] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1789.380129] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c9af7c4e-716a-4a6c-9d89-28d4ab0e1303 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.401998] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1789.401998] env[62619]: value = "task-1778326" [ 1789.401998] env[62619]: _type = "Task" [ 1789.401998] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.413232] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778326, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.464873] env[62619]: DEBUG nova.compute.manager [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1789.579431] env[62619]: INFO nova.compute.manager [-] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Took 1.91 seconds to deallocate network for instance. [ 1789.598020] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778323, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.75227} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.600894] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1789.602061] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f2749d-7cad-46e4-946b-fba3a25857d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.627545] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 52b29fef-eab6-4541-a570-af9c0c021a75/52b29fef-eab6-4541-a570-af9c0c021a75.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1789.631242] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec547503-9a20-444b-8f55-c6a3067fdcaa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.652692] env[62619]: DEBUG nova.compute.manager [req-890af91c-8940-40d8-b00a-ed2f87e37753 req-fb2424c2-4473-49c3-8edb-fb735fdd6186 service nova] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Received event network-vif-plugged-d1c7728e-1484-4294-ab32-b78e9572ada7 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1789.652918] env[62619]: DEBUG oslo_concurrency.lockutils [req-890af91c-8940-40d8-b00a-ed2f87e37753 req-fb2424c2-4473-49c3-8edb-fb735fdd6186 service nova] Acquiring lock "0987b6ec-2587-4f15-adbb-f563e19ecce9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.653155] env[62619]: DEBUG oslo_concurrency.lockutils [req-890af91c-8940-40d8-b00a-ed2f87e37753 req-fb2424c2-4473-49c3-8edb-fb735fdd6186 service nova] Lock "0987b6ec-2587-4f15-adbb-f563e19ecce9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.653408] env[62619]: DEBUG oslo_concurrency.lockutils [req-890af91c-8940-40d8-b00a-ed2f87e37753 req-fb2424c2-4473-49c3-8edb-fb735fdd6186 service nova] Lock "0987b6ec-2587-4f15-adbb-f563e19ecce9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.653486] env[62619]: DEBUG nova.compute.manager [req-890af91c-8940-40d8-b00a-ed2f87e37753 req-fb2424c2-4473-49c3-8edb-fb735fdd6186 service nova] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] No waiting events found dispatching network-vif-plugged-d1c7728e-1484-4294-ab32-b78e9572ada7 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1789.653653] env[62619]: WARNING 
nova.compute.manager [req-890af91c-8940-40d8-b00a-ed2f87e37753 req-fb2424c2-4473-49c3-8edb-fb735fdd6186 service nova] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Received unexpected event network-vif-plugged-d1c7728e-1484-4294-ab32-b78e9572ada7 for instance with vm_state building and task_state spawning. [ 1789.653835] env[62619]: DEBUG nova.compute.manager [req-890af91c-8940-40d8-b00a-ed2f87e37753 req-fb2424c2-4473-49c3-8edb-fb735fdd6186 service nova] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Received event network-changed-d1c7728e-1484-4294-ab32-b78e9572ada7 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1789.653942] env[62619]: DEBUG nova.compute.manager [req-890af91c-8940-40d8-b00a-ed2f87e37753 req-fb2424c2-4473-49c3-8edb-fb735fdd6186 service nova] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Refreshing instance network info cache due to event network-changed-d1c7728e-1484-4294-ab32-b78e9572ada7. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1789.654136] env[62619]: DEBUG oslo_concurrency.lockutils [req-890af91c-8940-40d8-b00a-ed2f87e37753 req-fb2424c2-4473-49c3-8edb-fb735fdd6186 service nova] Acquiring lock "refresh_cache-0987b6ec-2587-4f15-adbb-f563e19ecce9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1789.654279] env[62619]: DEBUG oslo_concurrency.lockutils [req-890af91c-8940-40d8-b00a-ed2f87e37753 req-fb2424c2-4473-49c3-8edb-fb735fdd6186 service nova] Acquired lock "refresh_cache-0987b6ec-2587-4f15-adbb-f563e19ecce9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.654431] env[62619]: DEBUG nova.network.neutron [req-890af91c-8940-40d8-b00a-ed2f87e37753 req-fb2424c2-4473-49c3-8edb-fb735fdd6186 service nova] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Refreshing network info cache for port d1c7728e-1484-4294-ab32-b78e9572ada7 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1789.658041] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1789.658041] env[62619]: value = "task-1778327" [ 1789.658041] env[62619]: _type = "Task" [ 1789.658041] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.670274] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778321, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.671472] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec92ee30-06fd-428d-9ca5-d183d633aff6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.687917] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778327, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.693370] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f459a16a-e5c6-4012-87e1-9650ac2ddca1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.706590] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 0a80942c-eb86-480b-ab7b-33112dd90d28] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1789.709522] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778320, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.755125] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-429e05ee-2eb4-4199-8d49-b28aa304828b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.768040] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abdd1df-b88b-4eaa-8e86-cf3d25883bbb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.791016] env[62619]: DEBUG nova.compute.provider_tree [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1789.846727] env[62619]: DEBUG oslo_vmware.api [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778325, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.916923] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778326, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.988755] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.089014] env[62619]: DEBUG oslo_concurrency.lockutils [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.162499] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778321, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.174033] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778327, 'name': ReconfigVM_Task, 'duration_secs': 0.430388} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.174462] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 52b29fef-eab6-4541-a570-af9c0c021a75/52b29fef-eab6-4541-a570-af9c0c021a75.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1790.176221] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5392d9c6-676b-4c76-aead-2b8ddcacf11f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.188471] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1790.188471] env[62619]: value = "task-1778328" [ 1790.188471] env[62619]: _type = "Task" [ 1790.188471] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.191708] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778320, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.201361] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778328, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.250599] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 79dfeb2b-06d0-45f1-b97e-10fa4f00d282] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1790.293839] env[62619]: DEBUG nova.network.neutron [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance_info_cache with network_info: [{"id": "5911524f-a8b5-4591-a312-ea0cefac24df", "address": "fa:16:3e:bd:c4:f9", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5911524f-a8", "ovs_interfaceid": "5911524f-a8b5-4591-a312-ea0cefac24df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.295783] env[62619]: DEBUG nova.scheduler.client.report [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1790.348621] env[62619]: DEBUG oslo_vmware.api [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778325, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.416505] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778326, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.435144] env[62619]: DEBUG nova.network.neutron [req-890af91c-8940-40d8-b00a-ed2f87e37753 req-fb2424c2-4473-49c3-8edb-fb735fdd6186 service nova] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Updated VIF entry in instance network info cache for port d1c7728e-1484-4294-ab32-b78e9572ada7. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1790.435274] env[62619]: DEBUG nova.network.neutron [req-890af91c-8940-40d8-b00a-ed2f87e37753 req-fb2424c2-4473-49c3-8edb-fb735fdd6186 service nova] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Updating instance_info_cache with network_info: [{"id": "d1c7728e-1484-4294-ab32-b78e9572ada7", "address": "fa:16:3e:eb:ab:79", "network": {"id": "8a4ba770-f725-4b7e-9ad2-896d3711fc45", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-954890163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df50ba9d97ac4c059077c87f9cfdb719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1c7728e-14", "ovs_interfaceid": "d1c7728e-1484-4294-ab32-b78e9572ada7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.657424] env[62619]: DEBUG oslo_vmware.api [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778321, 'name': RemoveSnapshot_Task, 'duration_secs': 2.12126} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.657529] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1790.657689] env[62619]: INFO nova.compute.manager [None req-bb7acd41-f761-494c-a8eb-c64b5cf00daa tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Took 17.14 seconds to snapshot the instance on the hypervisor. 
[ 1790.687709] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778320, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.012338} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.687972] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_81b2d0ae-c28a-450e-a337-0a628d726aa9/OSTACK_IMG_81b2d0ae-c28a-450e-a337-0a628d726aa9.vmdk to [datastore1] devstack-image-cache_base/d1791572-abf0-49e9-9ccd-ae11e1d9d561/d1791572-abf0-49e9-9ccd-ae11e1d9d561.vmdk. [ 1790.688195] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Cleaning up location [datastore1] OSTACK_IMG_81b2d0ae-c28a-450e-a337-0a628d726aa9 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1790.688385] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_81b2d0ae-c28a-450e-a337-0a628d726aa9 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1790.689237] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-944da564-43d8-429c-88b3-2eafd0e54ec5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.698473] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1790.698473] env[62619]: value = "task-1778329" [ 1790.698473] env[62619]: _type = "Task" [ 1790.698473] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.704561] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778328, 'name': Rename_Task, 'duration_secs': 0.337355} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.705123] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1790.705373] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc8e424e-877e-4c49-a331-95999563cb06 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.709602] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778329, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.714622] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1790.714622] env[62619]: value = "task-1778330" [ 1790.714622] env[62619]: _type = "Task" [ 1790.714622] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.722476] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778330, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.756589] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 0272ca2a-e9ff-4af5-8120-278a82d74627] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1790.801137] env[62619]: DEBUG oslo_concurrency.lockutils [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1790.803025] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.996s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.805493] env[62619]: DEBUG oslo_concurrency.lockutils [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.893s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.805921] env[62619]: DEBUG nova.objects.instance [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lazy-loading 'resources' on Instance uuid 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1790.823031] env[62619]: INFO nova.scheduler.client.report [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Deleted allocations for instance 4983b333-debb-4a2b-b28d-b321f0d8d7d7 [ 1790.846520] env[62619]: DEBUG oslo_vmware.api [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Task: {'id': task-1778325, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.800712} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.846815] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1790.847048] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1790.847286] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1790.847645] env[62619]: INFO nova.compute.manager [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Took 2.73 seconds to destroy the instance on the hypervisor. [ 1790.847908] env[62619]: DEBUG oslo.service.loopingcall [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1790.848121] env[62619]: DEBUG nova.compute.manager [-] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1790.848213] env[62619]: DEBUG nova.network.neutron [-] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1790.921173] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778326, 'name': CreateVM_Task, 'duration_secs': 1.021487} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.921390] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1790.922091] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.922294] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.922643] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1790.922933] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aef6b220-b9e7-489f-a08d-455b090a0872 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.929603] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1790.929603] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5204457c-d315-62de-757d-44d9251d85e0" [ 1790.929603] env[62619]: _type = "Task" [ 1790.929603] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.938751] env[62619]: DEBUG oslo_concurrency.lockutils [req-890af91c-8940-40d8-b00a-ed2f87e37753 req-fb2424c2-4473-49c3-8edb-fb735fdd6186 service nova] Releasing lock "refresh_cache-0987b6ec-2587-4f15-adbb-f563e19ecce9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1790.939000] env[62619]: DEBUG nova.compute.manager [req-890af91c-8940-40d8-b00a-ed2f87e37753 req-fb2424c2-4473-49c3-8edb-fb735fdd6186 service nova] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Received event network-vif-deleted-d0aa15bf-fe9d-4cf6-8bb3-337b0a223b09 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1790.939378] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5204457c-d315-62de-757d-44d9251d85e0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.210201] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778329, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035493} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.210502] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1791.210738] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1791572-abf0-49e9-9ccd-ae11e1d9d561/d1791572-abf0-49e9-9ccd-ae11e1d9d561.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.210837] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d1791572-abf0-49e9-9ccd-ae11e1d9d561/d1791572-abf0-49e9-9ccd-ae11e1d9d561.vmdk to [datastore1] 5cf7ca57-351f-48ab-8758-b30f50cd607f/5cf7ca57-351f-48ab-8758-b30f50cd607f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1791.211111] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-243752b2-f56a-43f5-ae20-716fc6de9f48 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.221604] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1791.221604] env[62619]: value = "task-1778331" [ 1791.221604] env[62619]: _type = "Task" [ 1791.221604] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.228980] env[62619]: DEBUG oslo_vmware.api [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778330, 'name': PowerOnVM_Task, 'duration_secs': 0.481653} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.228980] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1791.228980] env[62619]: INFO nova.compute.manager [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Took 7.17 seconds to spawn the instance on the hypervisor. [ 1791.229841] env[62619]: DEBUG nova.compute.manager [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1791.230652] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95e4de6-0ec8-4026-8aca-453d63c9d8e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.238604] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778331, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.259771] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: dd8c5b0d-a7da-43e3-bab4-a369eaadcfe5] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1791.331344] env[62619]: DEBUG oslo_concurrency.lockutils [None req-be3c5cc2-d25c-410f-bd4c-dd3db1e3c041 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.370s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.332333] env[62619]: DEBUG oslo_concurrency.lockutils [req-f9c90c70-7433-4db3-b571-fe5356135cfe req-2c48fde3-5b75-4053-869c-fe90fd1317ee service nova] Acquired lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.333963] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c318dc52-a0a6-4a80-b5c4-9eaeb223357a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.346250] env[62619]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 1791.347369] env[62619]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=62619) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1791.349620] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-76fbf714-519d-4797-b02b-7f21628a9f2a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.362395] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759cf956-4837-4c77-a797-8d3b7383a20e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.199146] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: eca829be-d425-4668-9ebd-1247c5ff19d0] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1792.200953] env[62619]: DEBUG nova.network.neutron [-] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.204292] env[62619]: DEBUG nova.compute.manager [req-708a7f58-1fac-4de8-b325-2b098d15c6fb req-450bfc77-de3a-4ed7-9553-3787940ad2cd service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Received event network-vif-deleted-2924458a-bf48-482f-ab31-ad34e83a94d4 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1792.204292] env[62619]: INFO nova.compute.manager [req-708a7f58-1fac-4de8-b325-2b098d15c6fb req-450bfc77-de3a-4ed7-9553-3787940ad2cd service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Neutron deleted interface 2924458a-bf48-482f-ab31-ad34e83a94d4; detaching it from the instance and deleting it from the info cache [ 1792.204292] env[62619]: DEBUG nova.network.neutron [req-708a7f58-1fac-4de8-b325-2b098d15c6fb req-450bfc77-de3a-4ed7-9553-3787940ad2cd service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.217254] env[62619]: INFO nova.compute.manager [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Took 17.55 seconds to build instance. [ 1792.225112] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778331, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.234430] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5204457c-d315-62de-757d-44d9251d85e0, 'name': SearchDatastore_Task, 'duration_secs': 0.012752} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.243390] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.243390] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1792.243623] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.244086] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.244086] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1792.247328] env[62619]: ERROR root [req-f9c90c70-7433-4db3-b571-fe5356135cfe req-2c48fde3-5b75-4053-869c-fe90fd1317ee service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-369091' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-369091' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-369091' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-369091'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-369091' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-369091' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-369091'}\n"]: nova.exception.InstanceNotFound: Instance 4983b333-debb-4a2b-b28d-b321f0d8d7d7 could not be found. [ 1792.247586] env[62619]: DEBUG oslo_concurrency.lockutils [req-f9c90c70-7433-4db3-b571-fe5356135cfe req-2c48fde3-5b75-4053-869c-fe90fd1317ee service nova] Releasing lock "4983b333-debb-4a2b-b28d-b321f0d8d7d7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.247953] env[62619]: DEBUG nova.compute.manager [req-f9c90c70-7433-4db3-b571-fe5356135cfe req-2c48fde3-5b75-4053-869c-fe90fd1317ee service nova] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Detach interface failed, port_id=f1f8853f-2de2-45f7-8853-4591e89cac0b, reason: Instance 4983b333-debb-4a2b-b28d-b321f0d8d7d7 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1792.248577] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3e4644b-66b5-4152-a838-b10dbd3419be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.268598] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1792.268925] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1792.269595] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7adcb069-5065-47e7-9f19-c3b40e0479c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.277729] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1792.277729] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dff489-265b-f5c1-1848-55cd91ff31fe" [ 1792.277729] env[62619]: _type = "Task" [ 1792.277729] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.289356] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dff489-265b-f5c1-1848-55cd91ff31fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.485422] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc683831-a4b5-43e8-a6c7-fbcad0bd0b2d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.494129] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a014ed8-5a35-461e-a1a4-f37c81a4575f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.533202] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf33d51-b1d0-45a5-b8b3-98366ded2be0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.546237] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252b2415-ab72-41c6-9765-31cd5355054b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.561326] env[62619]: DEBUG nova.compute.provider_tree [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1792.704768] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778331, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.708410] env[62619]: INFO nova.compute.manager [-] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Took 1.86 seconds to deallocate network for instance. 
[ 1792.710935] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 39adf15c-f77e-4737-aeeb-258887007b9a] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1792.720261] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb64ce4-bf25-41e8-969e-ef22156099df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.725740] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df38bbe2-11e8-49c9-81b4-08942e41a86a tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "52b29fef-eab6-4541-a570-af9c0c021a75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.064s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1792.749174] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance '917960ca-3870-4e4e-aafe-3c6d77cf7c51' progress to 0 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1792.753597] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57f28b13-006d-4d51-8961-7de10906a4ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.765707] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6bf92d-7ae0-4061-a58b-a5d23b7794ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.792119] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52dff489-265b-f5c1-1848-55cd91ff31fe, 'name': SearchDatastore_Task, 'duration_secs': 0.084987} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.804068] env[62619]: DEBUG nova.compute.manager [req-708a7f58-1fac-4de8-b325-2b098d15c6fb req-450bfc77-de3a-4ed7-9553-3787940ad2cd service nova] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Detach interface failed, port_id=2924458a-bf48-482f-ab31-ad34e83a94d4, reason: Instance 32aed8cd-1583-4253-bfb6-a98610e2f32e could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1792.805291] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6224ab5-c740-44dd-ba18-9d1bfa9dbf6d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.812774] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1792.812774] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a7e30e-4550-dad7-403f-7375220c9998" [ 1792.812774] env[62619]: _type = "Task" [ 1792.812774] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.822095] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a7e30e-4550-dad7-403f-7375220c9998, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.065157] env[62619]: DEBUG nova.scheduler.client.report [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1793.206217] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778331, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.214252] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: cd8b8828-79cf-4a7c-b018-b8bd745aaa45] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1793.220695] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.255432] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1793.255708] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cce81a28-427d-4aba-aa45-8c20ca40afe1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.265430] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1793.265430] env[62619]: value = "task-1778332" [ 1793.265430] env[62619]: _type = "Task" [ 1793.265430] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.275494] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778332, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.324888] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a7e30e-4550-dad7-403f-7375220c9998, 'name': SearchDatastore_Task, 'duration_secs': 0.085021} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.325270] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.325446] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 0987b6ec-2587-4f15-adbb-f563e19ecce9/0987b6ec-2587-4f15-adbb-f563e19ecce9.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1793.325714] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-92d4b3ee-3171-4bba-8a45-8c823f077e67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.334039] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1793.334039] env[62619]: value = "task-1778333" [ 1793.334039] env[62619]: _type = "Task" [ 1793.334039] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.343781] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778333, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.435519] env[62619]: DEBUG nova.compute.manager [req-be831541-d67b-451e-9545-13f9b477f354 req-a4725853-53d8-4199-8777-1631e5374022 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Received event network-changed-165f6c4a-b24e-4c32-845f-891bf7478563 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1793.435761] env[62619]: DEBUG nova.compute.manager [req-be831541-d67b-451e-9545-13f9b477f354 req-a4725853-53d8-4199-8777-1631e5374022 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Refreshing instance network info cache due to event network-changed-165f6c4a-b24e-4c32-845f-891bf7478563. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1793.436025] env[62619]: DEBUG oslo_concurrency.lockutils [req-be831541-d67b-451e-9545-13f9b477f354 req-a4725853-53d8-4199-8777-1631e5374022 service nova] Acquiring lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.436301] env[62619]: DEBUG oslo_concurrency.lockutils [req-be831541-d67b-451e-9545-13f9b477f354 req-a4725853-53d8-4199-8777-1631e5374022 service nova] Acquired lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.436912] env[62619]: DEBUG nova.network.neutron [req-be831541-d67b-451e-9545-13f9b477f354 req-a4725853-53d8-4199-8777-1631e5374022 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Refreshing network info cache for port 165f6c4a-b24e-4c32-845f-891bf7478563 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1793.573334] env[62619]: DEBUG oslo_concurrency.lockutils [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.768s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.576270] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.141s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.576564] env[62619]: DEBUG nova.objects.instance [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lazy-loading 'resources' on Instance uuid 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1793.591627] env[62619]: INFO nova.scheduler.client.report [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleted allocations for instance 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4 [ 1793.664126] env[62619]: DEBUG nova.compute.manager [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1793.665117] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323678bb-5122-48c1-a387-dbd369ede2ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.707384] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778331, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.717837] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 78c7a111-d497-4114-b4f4-07319e6e7df2] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1793.780514] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778332, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.820055] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.820278] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.845490] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778333, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.102107] env[62619]: DEBUG oslo_concurrency.lockutils [None req-155632bd-9282-402a-b272-539ea6d99e9b tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "848da7a1-9cec-4715-bbe1-ef7a51b3a5c4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.851s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.176906] env[62619]: INFO nova.compute.manager [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] instance snapshotting [ 1794.180118] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845f90fb-6c7e-4330-86bb-64804969758a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.205940] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231d0dd0-dbb2-4ecd-93cc-1809a96d5143 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.216564] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778331, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.491495} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.218487] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d1791572-abf0-49e9-9ccd-ae11e1d9d561/d1791572-abf0-49e9-9ccd-ae11e1d9d561.vmdk to [datastore1] 5cf7ca57-351f-48ab-8758-b30f50cd607f/5cf7ca57-351f-48ab-8758-b30f50cd607f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1794.227751] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac28737-1b41-428e-a318-35f5b198cc7a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.234829] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: ca5f5f6b-5303-4af4-adaa-e4aac72a90f8] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1794.270642] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 5cf7ca57-351f-48ab-8758-b30f50cd607f/5cf7ca57-351f-48ab-8758-b30f50cd607f.vmdk or device None with type streamOptimized {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1794.272647] env[62619]: DEBUG nova.network.neutron [req-be831541-d67b-451e-9545-13f9b477f354 req-a4725853-53d8-4199-8777-1631e5374022 service 
nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Updated VIF entry in instance network info cache for port 165f6c4a-b24e-4c32-845f-891bf7478563. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1794.272963] env[62619]: DEBUG nova.network.neutron [req-be831541-d67b-451e-9545-13f9b477f354 req-a4725853-53d8-4199-8777-1631e5374022 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Updating instance_info_cache with network_info: [{"id": "165f6c4a-b24e-4c32-845f-891bf7478563", "address": "fa:16:3e:ec:67:49", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165f6c4a-b2", "ovs_interfaceid": "165f6c4a-b24e-4c32-845f-891bf7478563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1794.278122] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d8e11c8-8dc6-45d9-93c8-6d662f271962 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.298222] env[62619]: DEBUG oslo_concurrency.lockutils [req-be831541-d67b-451e-9545-13f9b477f354 req-a4725853-53d8-4199-8777-1631e5374022 service nova] Releasing lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.307623] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778332, 'name': PowerOffVM_Task, 'duration_secs': 0.567976} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.311360] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1794.311557] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance '917960ca-3870-4e4e-aafe-3c6d77cf7c51' progress to 17 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1794.315287] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1794.315287] env[62619]: value = "task-1778334" [ 1794.315287] env[62619]: _type = "Task" [ 1794.315287] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.323499] env[62619]: DEBUG nova.compute.manager [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1794.337303] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778334, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.352109] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778333, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.800597} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.354861] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 0987b6ec-2587-4f15-adbb-f563e19ecce9/0987b6ec-2587-4f15-adbb-f563e19ecce9.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1794.355085] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1794.355589] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f4b6453c-8df5-4f59-84b7-150e1b92f69e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.364590] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1794.364590] env[62619]: value = "task-1778335" [ 1794.364590] env[62619]: _type = "Task" [ 1794.364590] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.374117] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778335, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.477511] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb7cee1-604d-4b2e-87b2-8cab0e8fde00 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.486253] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751e3130-b6a1-48c7-8ac5-7e35b9d2f0a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.520161] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31402198-d618-4512-8c74-6b32e1abfe9d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.528843] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29cab7fa-3349-4647-9cfc-3a6ffbb84ec1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.550527] env[62619]: DEBUG nova.compute.provider_tree [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1794.738577] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1794.739043] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: a802534f-1766-4ea9-9188-803ef197d775] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1794.740904] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c57a2de7-fdfd-4058-a0fc-a3e57f194123 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.750352] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1794.750352] env[62619]: value = "task-1778336" [ 1794.750352] env[62619]: _type = "Task" [ 1794.750352] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.759593] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778336, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.818317] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1794.818582] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1794.818791] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1794.818914] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1794.819068] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1794.819219] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1794.819492] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1794.819666] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1794.819831] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] 
Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1794.820184] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1794.820184] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1794.825281] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2387e03-ad56-4377-94d6-9306f296bc34 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.851604] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778334, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.852988] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1794.852988] env[62619]: value = "task-1778337" [ 1794.852988] env[62619]: _type = "Task" [ 1794.852988] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.858944] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.862829] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778337, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.873302] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778335, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.32452} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.873564] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1794.874377] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ed3514-678c-484f-b7b9-95399a1f1885 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.897205] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 0987b6ec-2587-4f15-adbb-f563e19ecce9/0987b6ec-2587-4f15-adbb-f563e19ecce9.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1794.897545] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44f1a270-2645-4e14-9593-92f4dba0d24f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.920686] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1794.920686] env[62619]: value = "task-1778338" [ 1794.920686] env[62619]: _type = "Task" [ 1794.920686] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.930321] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778338, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.053906] env[62619]: DEBUG nova.scheduler.client.report [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1795.244166] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 7217d898-54ee-46ed-88fa-959c38e988e7] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1795.261102] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778336, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.315780] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "54da64a0-4acf-4025-9b51-7af61dbd55fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.316010] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "54da64a0-4acf-4025-9b51-7af61dbd55fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.335708] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778334, 'name': ReconfigVM_Task, 'duration_secs': 0.933555} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.335966] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 5cf7ca57-351f-48ab-8758-b30f50cd607f/5cf7ca57-351f-48ab-8758-b30f50cd607f.vmdk or device None with type streamOptimized {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1795.337079] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'disk_bus': None, 'encryption_secret_uuid': None, 'boot_index': 0, 'size': 0, 'guest_format': None, 'device_type': 'disk', 'encryption_options': None, 'device_name': '/dev/sda', 'encryption_format': None, 'encrypted': False, 'image_id': '27a858d5-7985-4b17-8b01-50adcd8f566c'}], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'boot_index': None, 'guest_format': None, 'mount_device': '/dev/sdb', 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369133', 'volume_id': 'ac33ba75-c333-4e12-8448-75caf34bd9c5', 'name': 'volume-ac33ba75-c333-4e12-8448-75caf34bd9c5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '5cf7ca57-351f-48ab-8758-b30f50cd607f', 'attached_at': '', 'detached_at': '', 'volume_id': 'ac33ba75-c333-4e12-8448-75caf34bd9c5', 'serial': 'ac33ba75-c333-4e12-8448-75caf34bd9c5'}, 'attachment_id': 'a9411a69-18d6-48ca-b716-0bec173d78dd', 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=62619) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1795.337285] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Volume attach. 
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1795.337470] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369133', 'volume_id': 'ac33ba75-c333-4e12-8448-75caf34bd9c5', 'name': 'volume-ac33ba75-c333-4e12-8448-75caf34bd9c5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '5cf7ca57-351f-48ab-8758-b30f50cd607f', 'attached_at': '', 'detached_at': '', 'volume_id': 'ac33ba75-c333-4e12-8448-75caf34bd9c5', 'serial': 'ac33ba75-c333-4e12-8448-75caf34bd9c5'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1795.338248] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab58e04-2085-4056-93ce-64e280d16a90 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.357677] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ac450d-d1aa-4ba5-ae3d-2de737c9f005 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.365351] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778337, 'name': ReconfigVM_Task, 'duration_secs': 0.234703} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.377565] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance '917960ca-3870-4e4e-aafe-3c6d77cf7c51' progress to 33 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1795.389841] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] volume-ac33ba75-c333-4e12-8448-75caf34bd9c5/volume-ac33ba75-c333-4e12-8448-75caf34bd9c5.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1795.390537] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ec6f5b9-9e7f-4c07-81fc-05ecd89ad404 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.409275] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1795.409275] env[62619]: value = "task-1778339" [ 1795.409275] env[62619]: _type = "Task" [ 1795.409275] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.420910] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778339, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.431711] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778338, 'name': ReconfigVM_Task, 'duration_secs': 0.320821} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.432085] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 0987b6ec-2587-4f15-adbb-f563e19ecce9/0987b6ec-2587-4f15-adbb-f563e19ecce9.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1795.432888] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a9c5850-3b2e-4823-ab5b-251d8d135543 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.441330] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1795.441330] env[62619]: value = "task-1778340" [ 1795.441330] env[62619]: _type = "Task" [ 1795.441330] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.453104] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778340, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.559404] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.983s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.561735] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.956s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.563462] env[62619]: INFO nova.compute.claims [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1795.582198] env[62619]: INFO nova.scheduler.client.report [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Deleted allocations for instance 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85 [ 1795.747380] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 1f8c1f26-d4c9-4e06-b7a2-1b211ccc0b7c] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1795.762851] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778336, 'name': CreateSnapshot_Task, 'duration_secs': 0.752483} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.763174] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1795.764039] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7444a4-4723-4435-b011-c30321b215d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.818318] env[62619]: DEBUG nova.compute.manager [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1795.893521] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1795.893885] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1795.894148] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1795.894446] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1795.894679] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1795.894912] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1795.895340] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1795.895599] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1795.895886] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Got 1 
possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1795.896217] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1795.896538] env[62619]: DEBUG nova.virt.hardware [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1795.911101] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Reconfiguring VM instance instance-00000054 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1795.911877] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ec2a2ab-763f-4cf7-ac5f-8b563de6c044 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.953015] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1795.953015] env[62619]: value = "task-1778341" [ 1795.953015] env[62619]: _type = "Task" [ 1795.953015] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.957830] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778339, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.968721] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778340, 'name': Rename_Task, 'duration_secs': 0.237545} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.969129] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1795.969468] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-393f6ca2-fab4-443a-982b-7e1bd83736db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.974986] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778341, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.980624] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1795.980624] env[62619]: value = "task-1778342" [ 1795.980624] env[62619]: _type = "Task" [ 1795.980624] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.991915] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778342, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.093968] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fbc9288-96d0-4eec-93d5-23dfcd3cecf5 tempest-ServersNegativeTestJSON-1288164526 tempest-ServersNegativeTestJSON-1288164526-project-member] Lock "4cd6dafd-4f19-4d0f-8e07-8171a6a71e85" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.191s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.251062] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: da806d3f-79f0-4188-a2d8-0beeb9dfec1a] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1796.283317] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1796.284258] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a011b71d-38fc-4da6-9bd5-566bf65ff5a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.293995] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1796.293995] env[62619]: value = "task-1778343" [ 1796.293995] env[62619]: _type = "Task" [ 1796.293995] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.302887] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778343, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.340233] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.424303] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778339, 'name': ReconfigVM_Task, 'duration_secs': 0.8341} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.424605] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Reconfigured VM instance instance-00000043 to attach disk [datastore1] volume-ac33ba75-c333-4e12-8448-75caf34bd9c5/volume-ac33ba75-c333-4e12-8448-75caf34bd9c5.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1796.429637] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87f94a7a-c5de-4852-9dfe-9c8df7f71c14 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.450142] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1796.450142] env[62619]: value = "task-1778344" [ 1796.450142] env[62619]: _type = "Task" [ 1796.450142] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.460700] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778344, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.469551] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778341, 'name': ReconfigVM_Task, 'duration_secs': 0.234165} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.469971] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Reconfigured VM instance instance-00000054 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1796.471150] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81b38ee-35dd-427b-8412-914cd3040e9f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.502528] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 917960ca-3870-4e4e-aafe-3c6d77cf7c51/917960ca-3870-4e4e-aafe-3c6d77cf7c51.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1796.506044] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45143c13-ba89-46a3-b642-0aede6bf4d33 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.527898] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778342, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.529378] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1796.529378] env[62619]: value = "task-1778345" [ 1796.529378] env[62619]: _type = "Task" [ 1796.529378] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.538912] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778345, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.757495] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 060427a2-e724-4c51-879e-675154ae5df2] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1796.812110] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778343, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.844872] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf57bc8-e934-4368-a7e1-7803af3dffc3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.856874] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93398b8-c54d-4c6e-ac10-3f08ffe404cb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.892655] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f88a5c8-7c91-4591-8dbe-e8c8d9153d08 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.903244] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df17c848-8dab-4d8b-8d86-e06bca6ad3a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.919550] env[62619]: DEBUG nova.compute.provider_tree [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1796.960779] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778344, 'name': ReconfigVM_Task, 'duration_secs': 0.278101} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.961109] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369133', 'volume_id': 'ac33ba75-c333-4e12-8448-75caf34bd9c5', 'name': 'volume-ac33ba75-c333-4e12-8448-75caf34bd9c5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '5cf7ca57-351f-48ab-8758-b30f50cd607f', 'attached_at': '', 'detached_at': '', 'volume_id': 'ac33ba75-c333-4e12-8448-75caf34bd9c5', 'serial': 'ac33ba75-c333-4e12-8448-75caf34bd9c5'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1796.961652] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f5a0b191-be41-474f-b7e5-bba87be28477 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.971859] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1796.971859] env[62619]: value = "task-1778346" [ 1796.971859] env[62619]: _type = "Task" [ 1796.971859] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.982405] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778346, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.005791] env[62619]: DEBUG oslo_vmware.api [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778342, 'name': PowerOnVM_Task, 'duration_secs': 0.858049} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.006219] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1797.006482] env[62619]: INFO nova.compute.manager [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Took 10.49 seconds to spawn the instance on the hypervisor. [ 1797.007273] env[62619]: DEBUG nova.compute.manager [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1797.007840] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0faecfc-91ce-4eb3-b365-ab0e425a1c9b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.040580] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778345, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.263978] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: cef20063-96f0-46cc-9f7d-4436b60216c6] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1797.308703] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778343, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.424200] env[62619]: DEBUG nova.scheduler.client.report [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1797.484721] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778346, 'name': Rename_Task, 'duration_secs': 0.217134} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.485029] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1797.485289] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3d539f9-926a-46ec-80f3-e0de44337003 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.494748] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1797.494748] env[62619]: value = "task-1778347" [ 1797.494748] env[62619]: _type = "Task" [ 1797.494748] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.503832] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778347, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.531095] env[62619]: INFO nova.compute.manager [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Took 21.93 seconds to build instance. [ 1797.544131] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778345, 'name': ReconfigVM_Task, 'duration_secs': 0.556906} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.544449] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 917960ca-3870-4e4e-aafe-3c6d77cf7c51/917960ca-3870-4e4e-aafe-3c6d77cf7c51.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1797.544770] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance '917960ca-3870-4e4e-aafe-3c6d77cf7c51' progress to 50 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1797.768272] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 597c0f95-5798-4022-8e2e-89a700698d7a] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1797.806698] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778343, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.930839] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.369s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.932280] env[62619]: DEBUG nova.compute.manager [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1797.935651] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.250s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.936038] env[62619]: DEBUG nova.objects.instance [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lazy-loading 'resources' on Instance uuid 11869077-b428-413f-9f8f-7eac08d2d9ec {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1798.005970] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778347, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.037252] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8b6ea58-7bc2-43d9-90e4-d5e1975ce10b tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "0987b6ec-2587-4f15-adbb-f563e19ecce9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.444s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.052275] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02248b23-28e2-4122-bf5e-fa78d5b0b181 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.075417] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704228e7-c74a-4305-93c5-cef960d5dfab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.096447] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance '917960ca-3870-4e4e-aafe-3c6d77cf7c51' progress to 67 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1798.271596] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 40eeb844-7423-4818-8095-81062c7e6392] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1798.307468] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778343, 'name': CloneVM_Task, 'duration_secs': 1.735209} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.308025] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Created linked-clone VM from snapshot [ 1798.308824] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3797b6fd-0ec4-4543-ab1d-934ceb22c18d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.317552] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Uploading image 304cf0af-24f3-4f16-8ae8-bda782d14051 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1798.340192] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1798.340192] env[62619]: value = "vm-369146" [ 1798.340192] env[62619]: _type = "VirtualMachine" [ 1798.340192] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1798.340470] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ab3ea422-0f6f-495c-ac38-b7e6149045d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.348780] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Lease: (returnval){ [ 1798.348780] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bbde9d-2acd-192a-8500-edf20a56f7d4" [ 1798.348780] env[62619]: _type = "HttpNfcLease" [ 1798.348780] env[62619]: } obtained for exporting VM: (result){ [ 1798.348780] env[62619]: value = "vm-369146" [ 1798.348780] env[62619]: _type = "VirtualMachine" [ 1798.348780] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1798.349083] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the lease: (returnval){ [ 1798.349083] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bbde9d-2acd-192a-8500-edf20a56f7d4" [ 1798.349083] env[62619]: _type = "HttpNfcLease" [ 1798.349083] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1798.356409] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1798.356409] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bbde9d-2acd-192a-8500-edf20a56f7d4" [ 1798.356409] env[62619]: _type = "HttpNfcLease" [ 1798.356409] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1798.438326] env[62619]: DEBUG nova.compute.utils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1798.439551] env[62619]: DEBUG nova.compute.manager [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1798.439798] env[62619]: DEBUG nova.network.neutron [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1798.485437] env[62619]: DEBUG nova.policy [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c02533fb867d44eb88c0ec3cf3f3982b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d9cfa8d5b184320ae434919598191ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1798.505765] env[62619]: DEBUG oslo_vmware.api [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778347, 'name': PowerOnVM_Task, 'duration_secs': 0.928665} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.506052] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1798.618549] env[62619]: DEBUG nova.compute.manager [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1798.618549] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-884107f8-e46d-4c46-9952-157242337f33 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.762396] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7061c6-84ba-4c56-b70b-3939ca01eba3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.775149] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e028aff8-0407-41c5-a9ed-87bb12d73032 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.778840] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 9014ef05-64d1-4bd6-9f2e-db58003b6520] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1798.815418] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e560c8-3866-4fb7-a588-572ccc24bdb8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.826867] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339bb5db-643e-4d80-b458-f2270f18d245 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.844418] env[62619]: DEBUG nova.compute.provider_tree [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1798.858392] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1798.858392] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bbde9d-2acd-192a-8500-edf20a56f7d4" [ 1798.858392] env[62619]: _type = "HttpNfcLease" [ 1798.858392] env[62619]: } is ready. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1798.858895] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1798.858895] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bbde9d-2acd-192a-8500-edf20a56f7d4" [ 1798.858895] env[62619]: _type = "HttpNfcLease" [ 1798.858895] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1798.859973] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03782785-d01a-4380-b2c0-6e71c4ad07e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.867877] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525d9bba-856a-e354-4a86-902fa420364f/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1798.868144] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525d9bba-856a-e354-4a86-902fa420364f/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1798.949636] env[62619]: DEBUG nova.compute.manager [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1798.988947] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7239c764-4227-4ee3-93dc-bec6fa26114f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.036149] env[62619]: DEBUG nova.network.neutron [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Successfully created port: 105882b4-fd3c-4267-bcf1-662dacfc582f {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1799.142121] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d077410f-0715-4d22-ac4b-68f771482efc tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 35.022s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.281738] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 94c9a119-5c04-4550-b55d-a4a2985385d3] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1799.348409] env[62619]: DEBUG nova.scheduler.client.report [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1799.561630] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "0987b6ec-2587-4f15-adbb-f563e19ecce9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.562088] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "0987b6ec-2587-4f15-adbb-f563e19ecce9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.562403] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "0987b6ec-2587-4f15-adbb-f563e19ecce9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.562682] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "0987b6ec-2587-4f15-adbb-f563e19ecce9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.562915] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "0987b6ec-2587-4f15-adbb-f563e19ecce9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.565470] env[62619]: INFO nova.compute.manager [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Terminating instance [ 1799.785466] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: ef41dd29-1270-4071-9e89-20132131de2d] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1799.817201] env[62619]: DEBUG nova.network.neutron [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Port 5911524f-a8b5-4591-a312-ea0cefac24df binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1799.854251] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.919s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.857022] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.849s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.857585] env[62619]: DEBUG nova.objects.instance [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lazy-loading 'resources' on Instance uuid 88f79718-97d0-432c-8515-b60ab3dfd7e0 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1799.880507] env[62619]: INFO nova.scheduler.client.report [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleted allocations for instance 11869077-b428-413f-9f8f-7eac08d2d9ec [ 1799.959854] env[62619]: DEBUG 
nova.compute.manager [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1799.988526] env[62619]: DEBUG nova.virt.hardware [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1799.988876] env[62619]: DEBUG nova.virt.hardware [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1799.989518] env[62619]: DEBUG nova.virt.hardware [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1799.989794] env[62619]: DEBUG nova.virt.hardware [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1799.989995] env[62619]: DEBUG nova.virt.hardware [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1799.990208] env[62619]: DEBUG nova.virt.hardware [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1799.990436] env[62619]: DEBUG nova.virt.hardware [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1799.990599] env[62619]: DEBUG nova.virt.hardware [None 
req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1799.990763] env[62619]: DEBUG nova.virt.hardware [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1799.990923] env[62619]: DEBUG nova.virt.hardware [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1799.991102] env[62619]: DEBUG nova.virt.hardware [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1799.991999] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b5d113-5cef-499d-888e-82cf36e4ae19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.002594] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03adfc0-c931-471c-9bed-356a2429b4c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.071059] env[62619]: DEBUG nova.compute.manager [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1800.071059] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1800.071809] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f016a959-186a-4276-bb94-164456a23520 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.082546] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1800.083198] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba5437cb-4b96-448f-88f0-b36325988b76 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.092366] env[62619]: DEBUG oslo_vmware.api [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1800.092366] env[62619]: value = "task-1778349" [ 1800.092366] env[62619]: _type = "Task" [ 1800.092366] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.101916] env[62619]: DEBUG oslo_vmware.api [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778349, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.289983] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: d4230edc-cfda-4b9f-ab42-2f39c699ff03] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1800.360557] env[62619]: DEBUG nova.objects.instance [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lazy-loading 'numa_topology' on Instance uuid 88f79718-97d0-432c-8515-b60ab3dfd7e0 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1800.388330] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c6fff07-6716-4be1-b817-a6fbef1ef11d tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "11869077-b428-413f-9f8f-7eac08d2d9ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.680s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.551247] env[62619]: DEBUG nova.compute.manager [req-ef5d3326-930d-4fec-b1c0-f00c70aa6b5e req-855ed933-e240-4fc2-99aa-f7eb9c65018b service nova] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Received event network-vif-plugged-105882b4-fd3c-4267-bcf1-662dacfc582f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1800.551458] env[62619]: DEBUG oslo_concurrency.lockutils [req-ef5d3326-930d-4fec-b1c0-f00c70aa6b5e req-855ed933-e240-4fc2-99aa-f7eb9c65018b service nova] Acquiring lock "cee0356c-25d7-48ca-be09-16b0e1b56a41-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.551658] env[62619]: DEBUG oslo_concurrency.lockutils [req-ef5d3326-930d-4fec-b1c0-f00c70aa6b5e req-855ed933-e240-4fc2-99aa-f7eb9c65018b service nova] Lock "cee0356c-25d7-48ca-be09-16b0e1b56a41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.551816] env[62619]: DEBUG oslo_concurrency.lockutils [req-ef5d3326-930d-4fec-b1c0-f00c70aa6b5e req-855ed933-e240-4fc2-99aa-f7eb9c65018b service nova] Lock "cee0356c-25d7-48ca-be09-16b0e1b56a41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.551975] env[62619]: DEBUG nova.compute.manager [req-ef5d3326-930d-4fec-b1c0-f00c70aa6b5e req-855ed933-e240-4fc2-99aa-f7eb9c65018b service nova] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] No waiting events found dispatching network-vif-plugged-105882b4-fd3c-4267-bcf1-662dacfc582f {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1800.555239] env[62619]: WARNING nova.compute.manager [req-ef5d3326-930d-4fec-b1c0-f00c70aa6b5e req-855ed933-e240-4fc2-99aa-f7eb9c65018b service nova] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Received unexpected event network-vif-plugged-105882b4-fd3c-4267-bcf1-662dacfc582f for instance with vm_state building and task_state spawning. 
[ 1800.606831] env[62619]: DEBUG oslo_vmware.api [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778349, 'name': PowerOffVM_Task, 'duration_secs': 0.312434} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.607119] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1800.607326] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1800.607594] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a6e48ede-cd73-4d5a-80b4-f6b99c3414cd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.675848] env[62619]: DEBUG nova.network.neutron [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Successfully updated port: 105882b4-fd3c-4267-bcf1-662dacfc582f {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1800.701194] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1800.701451] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1800.701631] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleting the datastore file [datastore1] 0987b6ec-2587-4f15-adbb-f563e19ecce9 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1800.701894] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44e16646-9e76-4b91-986a-ebf2b057b2f7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.712863] env[62619]: DEBUG oslo_vmware.api [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for the task: (returnval){ [ 1800.712863] env[62619]: value = "task-1778351" [ 1800.712863] env[62619]: _type = "Task" [ 1800.712863] env[62619]: } to 
complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.723586] env[62619]: DEBUG oslo_vmware.api [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778351, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.793604] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 2a41be15-efaf-4e78-a278-2711cb11e98f] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1800.852432] env[62619]: DEBUG oslo_concurrency.lockutils [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.852744] env[62619]: DEBUG oslo_concurrency.lockutils [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.852957] env[62619]: DEBUG oslo_concurrency.lockutils [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.863501] env[62619]: DEBUG nova.objects.base [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Object Instance<88f79718-97d0-432c-8515-b60ab3dfd7e0> lazy-loaded attributes: resources,numa_topology {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1801.153895] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ecbf50-ff37-4cb5-b1be-1a556e8982f8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.163221] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83fc4a4d-bb9b-429b-a50e-4d2e8091ffa1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.200638] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "refresh_cache-cee0356c-25d7-48ca-be09-16b0e1b56a41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.200787] env[62619]: DEBUG oslo_concurrency.lockutils [None 
req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquired lock "refresh_cache-cee0356c-25d7-48ca-be09-16b0e1b56a41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1801.200944] env[62619]: DEBUG nova.network.neutron [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1801.203506] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4403dd-0c95-4fad-8c44-8d46613033a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.212426] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db645cf4-f77b-48a2-9b16-066f191822f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.228463] env[62619]: DEBUG oslo_vmware.api [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Task: {'id': task-1778351, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141635} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.236350] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1801.236648] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1801.236866] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1801.237102] env[62619]: INFO nova.compute.manager [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1801.237458] env[62619]: DEBUG oslo.service.loopingcall [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1801.238527] env[62619]: DEBUG nova.compute.provider_tree [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1801.240085] env[62619]: DEBUG nova.compute.manager [-] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1801.243573] env[62619]: DEBUG nova.network.neutron [-] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1801.298913] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 6be4f813-7171-4515-a728-5cf34665205a] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1801.738502] env[62619]: DEBUG nova.network.neutron [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1801.761819] env[62619]: ERROR nova.scheduler.client.report [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [req-bc436742-b2bf-4ac4-a4b4-eada6ecb41ed] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bc436742-b2bf-4ac4-a4b4-eada6ecb41ed"}]} [ 1801.779104] env[62619]: DEBUG nova.scheduler.client.report [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1801.796874] env[62619]: DEBUG nova.scheduler.client.report [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1801.796874] env[62619]: DEBUG nova.compute.provider_tree [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1801.805944] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 7c058337-1684-4553-8e96-dd2cd1814a15] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1801.817579] env[62619]: DEBUG nova.scheduler.client.report [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1801.840031] env[62619]: DEBUG nova.scheduler.client.report [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1801.887278] env[62619]: DEBUG oslo_concurrency.lockutils [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 
tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.887480] env[62619]: DEBUG oslo_concurrency.lockutils [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1801.887660] env[62619]: DEBUG nova.network.neutron [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1801.889477] env[62619]: DEBUG nova.network.neutron [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Updating instance_info_cache with network_info: [{"id": "105882b4-fd3c-4267-bcf1-662dacfc582f", "address": "fa:16:3e:e8:02:49", "network": {"id": "91d6666a-8674-4957-8cbe-8e730e5c0741", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1893713214-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9cfa8d5b184320ae434919598191ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap105882b4-fd", "ovs_interfaceid": "105882b4-fd3c-4267-bcf1-662dacfc582f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.002286] env[62619]: DEBUG nova.network.neutron [-] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.084154] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7667d832-7016-42fd-8b13-71d64aab91aa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.094171] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331f4ccf-6ed4-496e-81ee-1ad6afc4844f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.125179] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a582460f-5614-49b9-96a5-75051674f1c7 {{(pid=62619) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.134100] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af761e7-9a16-489c-9991-dcf67636095b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.149337] env[62619]: DEBUG nova.compute.provider_tree [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1802.307127] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 80363e16-5dd2-42ad-9ead-25b121d62211] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1802.393597] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Releasing lock "refresh_cache-cee0356c-25d7-48ca-be09-16b0e1b56a41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.393941] env[62619]: DEBUG nova.compute.manager [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Instance network_info: |[{"id": "105882b4-fd3c-4267-bcf1-662dacfc582f", "address": "fa:16:3e:e8:02:49", "network": {"id": "91d6666a-8674-4957-8cbe-8e730e5c0741", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1893713214-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9cfa8d5b184320ae434919598191ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap105882b4-fd", "ovs_interfaceid": "105882b4-fd3c-4267-bcf1-662dacfc582f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1802.394375] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: 
cee0356c-25d7-48ca-be09-16b0e1b56a41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:02:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '030ecc21-dc1c-4283-854e-88e623b3970a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '105882b4-fd3c-4267-bcf1-662dacfc582f', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1802.402714] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Creating folder: Project (3d9cfa8d5b184320ae434919598191ef). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1802.403384] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ce41f74-9e8f-4175-88c3-afb6cd718055 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.420414] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Created folder: Project (3d9cfa8d5b184320ae434919598191ef) in parent group-v368875. [ 1802.420604] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Creating folder: Instances. Parent ref: group-v369147. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1802.420858] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-269eae1f-c458-4182-a4fe-ca38443e01eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.436131] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Created folder: Instances in parent group-v369147. [ 1802.436651] env[62619]: DEBUG oslo.service.loopingcall [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1802.436651] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1802.436776] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f176eff2-d7da-43dd-903b-c8a3e98a1464 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.461155] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1802.461155] env[62619]: value = "task-1778354" [ 1802.461155] env[62619]: _type = "Task" [ 1802.461155] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.470610] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778354, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.505668] env[62619]: INFO nova.compute.manager [-] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Took 1.27 seconds to deallocate network for instance. [ 1802.605627] env[62619]: DEBUG nova.compute.manager [req-1715e344-c776-4920-bb65-542271a3450d req-804d7972-2a1d-4285-b13e-11cac62dcae6 service nova] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Received event network-changed-105882b4-fd3c-4267-bcf1-662dacfc582f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1802.605627] env[62619]: DEBUG nova.compute.manager [req-1715e344-c776-4920-bb65-542271a3450d req-804d7972-2a1d-4285-b13e-11cac62dcae6 service nova] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Refreshing instance network info cache due to event network-changed-105882b4-fd3c-4267-bcf1-662dacfc582f. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1802.605627] env[62619]: DEBUG oslo_concurrency.lockutils [req-1715e344-c776-4920-bb65-542271a3450d req-804d7972-2a1d-4285-b13e-11cac62dcae6 service nova] Acquiring lock "refresh_cache-cee0356c-25d7-48ca-be09-16b0e1b56a41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.605627] env[62619]: DEBUG oslo_concurrency.lockutils [req-1715e344-c776-4920-bb65-542271a3450d req-804d7972-2a1d-4285-b13e-11cac62dcae6 service nova] Acquired lock "refresh_cache-cee0356c-25d7-48ca-be09-16b0e1b56a41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.605627] env[62619]: DEBUG nova.network.neutron [req-1715e344-c776-4920-bb65-542271a3450d req-804d7972-2a1d-4285-b13e-11cac62dcae6 service nova] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Refreshing network info cache for port 105882b4-fd3c-4267-bcf1-662dacfc582f {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1802.655989] env[62619]: DEBUG nova.network.neutron [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance_info_cache with network_info: [{"id": "5911524f-a8b5-4591-a312-ea0cefac24df", "address": "fa:16:3e:bd:c4:f9", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5911524f-a8", "ovs_interfaceid": "5911524f-a8b5-4591-a312-ea0cefac24df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.670052] env[62619]: ERROR nova.scheduler.client.report [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [req-204b4ff6-33b4-45c1-8eee-c8e0bf1adcc0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-204b4ff6-33b4-45c1-8eee-c8e0bf1adcc0"}]} [ 1802.690403] env[62619]: DEBUG nova.scheduler.client.report [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1802.705589] env[62619]: DEBUG nova.scheduler.client.report [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1802.705831] env[62619]: DEBUG nova.compute.provider_tree [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1802.718435] env[62619]: DEBUG nova.scheduler.client.report [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1802.737661] env[62619]: DEBUG nova.scheduler.client.report [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 
tempest-DeleteServersTestJSON-378247299-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1802.811633] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 6dbe4133-a6ba-4bba-9eb9-47a3d2691eec] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1802.921165] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "70265068-1185-4f23-b0b4-ed2378c17a89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.921385] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "70265068-1185-4f23-b0b4-ed2378c17a89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1802.974842] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778354, 'name': CreateVM_Task, 'duration_secs': 0.45726} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.974842] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1802.975730] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.975730] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.976032] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1802.976924] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c20fa226-810f-4e8f-86b9-5cf3f4311f3c {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.983841] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1802.983841] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526cb7c2-20a5-e52f-f382-fb98913eb698" [ 1802.983841] env[62619]: _type = "Task" [ 1802.983841] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.995771] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526cb7c2-20a5-e52f-f382-fb98913eb698, 'name': SearchDatastore_Task, 'duration_secs': 0.010202} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.997681] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.997932] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1802.998233] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.998384] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.998563] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1802.999054] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79debbd6-b8d2-480b-8b5f-beaaa214f423 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.008823] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 
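The paired "Acquiring lock ..." / "Releasing lock ..." lines around "[datastore1] devstack-image-cache_base/..." and the "acquired ... waited" / "released ... held" lines for "compute_resources" come from oslo.concurrency's lock helpers, which serialize work on shared resources such as the datastore image cache. A minimal sketch of the two usual forms, assuming oslo.concurrency is available as in this environment; the lock names and worker bodies below are placeholders, not Nova's actual code:

    from oslo_concurrency import lockutils

    CACHE_LOCK = "[datastore1] devstack-image-cache_base/<image-id>"   # placeholder name

    def refresh_image_cache(populate):
        # Context-manager form: entering/leaving this block is what produces
        # the "Acquiring lock ..." / "Releasing lock ..." DEBUG pairs.
        with lockutils.lock(CACHE_LOCK):
            populate()

    @lockutils.synchronized("compute_resources")
    def update_usage():
        # Decorator form: the wrapper reports how long the caller waited for
        # the lock and how long it was held, like the compute_resources lines.
        pass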
tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1803.009025] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1803.010769] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffb3f1cd-d250-46f4-9689-a353f269b03a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.013950] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd84936-cc2d-44a1-bcae-56ea3b4e5cdc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.017213] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.023510] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1803.023510] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52888f02-469e-a86b-c3d4-afdf0ed0c0a4" [ 1803.023510] env[62619]: _type = "Task" [ 1803.023510] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.024675] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102eebed-79c6-405a-9a12-fbae20d31496 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.036270] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52888f02-469e-a86b-c3d4-afdf0ed0c0a4, 'name': SearchDatastore_Task, 'duration_secs': 0.010934} completed successfully. 
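Each "Waiting for the task: (returnval){ ... } to complete" block followed by "progress is N%" and "completed successfully" is the vSphere task-polling loop (oslo.vmware's wait_for_task in the paths logged above). A simplified, hypothetical re-implementation of that loop; get_task_info stands in for the real property-collector call, and the real helper adds retries, backoff and error translation:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        """Poll a vSphere-style task until it reaches a terminal state.

        get_task_info is a hypothetical callable returning an object with
        .state ('running', 'success' or 'error'), .progress (0-100) and,
        on failure, .error.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == "success":
                return info                       # "completed successfully"
            if info.state == "error":
                raise RuntimeError(f"task failed: {info.error}")
            # corresponds to the "progress is N%" DEBUG lines while polling
            print(f"progress is {info.progress}%")
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete in time")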
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.063072] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d9fbb3b-681c-41b1-ba3a-b4a5202623b0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.066235] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b1f398-785a-4fa2-be04-685c0126a197 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.075101] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1803.075101] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523f1a8d-92d5-688d-d450-bf57ba46b4ac" [ 1803.075101] env[62619]: _type = "Task" [ 1803.075101] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.076485] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0796a1e-720b-4c63-8a5a-da44712690a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.093109] env[62619]: DEBUG nova.compute.provider_tree [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1803.097772] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523f1a8d-92d5-688d-d450-bf57ba46b4ac, 'name': SearchDatastore_Task, 'duration_secs': 0.011335} completed successfully. 
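The 409 with code placement.concurrent_update earlier in this section (req-204b4ff6-...) and the inventory refresh that follows illustrate Placement's generation-guarded writes: every inventory PUT carries the resource provider generation last seen, and a conflict means another writer bumped it, so the client re-reads and retries. A schematic sketch of that pattern; the resource-provider endpoints are the standard Placement ones, while the URL, token and retry limit are assumptions for illustration:

    import requests

    PLACEMENT_URL = "http://placement.example/placement"   # hypothetical endpoint
    HEADERS = {"X-Auth-Token": "TOKEN"}                     # hypothetical auth

    def set_inventory(rp_uuid, inventories, retries=3):
        for _ in range(retries):
            # Re-read the provider to learn its current generation.
            rp = requests.get(f"{PLACEMENT_URL}/resource_providers/{rp_uuid}",
                              headers=HEADERS)
            rp.raise_for_status()
            body = {"resource_provider_generation": rp.json()["generation"],
                    "inventories": inventories}
            resp = requests.put(
                f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
                json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 "placement.concurrent_update": someone else changed the
            # provider; loop to refresh the generation and retry.
        raise RuntimeError("gave up after repeated generation conflicts")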
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.098239] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1803.098497] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] cee0356c-25d7-48ca-be09-16b0e1b56a41/cee0356c-25d7-48ca-be09-16b0e1b56a41.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1803.098741] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab70c4c9-b465-4e9f-890a-0391653df46c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.106261] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1803.106261] env[62619]: value = "task-1778355" [ 1803.106261] env[62619]: _type = "Task" [ 1803.106261] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.117038] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778355, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.159088] env[62619]: DEBUG oslo_concurrency.lockutils [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1803.316185] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: b6aae13f-0711-4421-9d55-de7ece3e4b89] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1803.349046] env[62619]: DEBUG nova.network.neutron [req-1715e344-c776-4920-bb65-542271a3450d req-804d7972-2a1d-4285-b13e-11cac62dcae6 service nova] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Updated VIF entry in instance network info cache for port 105882b4-fd3c-4267-bcf1-662dacfc582f. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1803.349254] env[62619]: DEBUG nova.network.neutron [req-1715e344-c776-4920-bb65-542271a3450d req-804d7972-2a1d-4285-b13e-11cac62dcae6 service nova] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Updating instance_info_cache with network_info: [{"id": "105882b4-fd3c-4267-bcf1-662dacfc582f", "address": "fa:16:3e:e8:02:49", "network": {"id": "91d6666a-8674-4957-8cbe-8e730e5c0741", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1893713214-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9cfa8d5b184320ae434919598191ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap105882b4-fd", "ovs_interfaceid": "105882b4-fd3c-4267-bcf1-662dacfc582f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.423575] env[62619]: DEBUG nova.compute.manager [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1803.618662] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778355, 'name': CopyVirtualDisk_Task} progress is 89%. 
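The instance_info_cache entry above is a JSON list of VIF dictionaries (port id, MAC address, network, subnets, fixed IPs and binding details). A small sketch of pulling a human-readable summary out of one such entry; the literal below is trimmed down from the values logged for port 105882b4-fd3c-4267-bcf1-662dacfc582f:

    def summarize_vif(vif):
        # Collect every fixed IP across all subnets of the VIF's network.
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        return {"port_id": vif["id"], "mac": vif["address"], "fixed_ips": ips}

    vif = {
        "id": "105882b4-fd3c-4267-bcf1-662dacfc582f",
        "address": "fa:16:3e:e8:02:49",
        "network": {"subnets": [{"cidr": "192.168.128.0/28",
                                 "ips": [{"address": "192.168.128.9"}]}]},
    }
    print(summarize_vif(vif))
    # -> {'port_id': '105882b4-fd3c-4267-bcf1-662dacfc582f',
    #     'mac': 'fa:16:3e:e8:02:49', 'fixed_ips': ['192.168.128.9']}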
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.631333] env[62619]: DEBUG nova.scheduler.client.report [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 146 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1803.631599] env[62619]: DEBUG nova.compute.provider_tree [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 146 to 147 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1803.631778] env[62619]: DEBUG nova.compute.provider_tree [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1803.669190] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f084550-a83a-4c8c-851e-2e493846460e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.677467] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b993db-40fa-47df-8cb9-9a25bc2d0560 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.820744] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 91ce0ab3-4fa4-4992-995a-0baeec91d9d0] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1803.851664] env[62619]: DEBUG oslo_concurrency.lockutils [req-1715e344-c776-4920-bb65-542271a3450d req-804d7972-2a1d-4285-b13e-11cac62dcae6 service nova] Releasing lock "refresh_cache-cee0356c-25d7-48ca-be09-16b0e1b56a41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1803.851971] env[62619]: DEBUG nova.compute.manager [req-1715e344-c776-4920-bb65-542271a3450d req-804d7972-2a1d-4285-b13e-11cac62dcae6 service nova] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Received event network-vif-deleted-d1c7728e-1484-4294-ab32-b78e9572ada7 {{(pid=62619) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11512}} [ 1803.946190] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.122059] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778355, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524673} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.122059] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] cee0356c-25d7-48ca-be09-16b0e1b56a41/cee0356c-25d7-48ca-be09-16b0e1b56a41.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1804.122294] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1804.122559] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-326e5c60-6d24-43dd-9dec-2a15e7f59501 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.130835] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1804.130835] env[62619]: value = "task-1778356" [ 1804.130835] env[62619]: _type = "Task" [ 1804.130835] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.141405] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.284s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.144160] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778356, 'name': ExtendVirtualDisk_Task} progress is 0%. 
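The "Extending root virtual disk to 1048576" value is consistent with the flavor's 1 GiB root disk expressed in KiB (the m1.nano flavor with root_gb=1 appears at the end of this section). A one-line check of that unit conversion, under the assumption that the extend call takes a size in KiB:

    # 1 GiB root disk (root_gb=1 on the m1.nano flavor) expressed in KiB.
    root_gb = 1
    root_kib = root_gb * 1024 * 1024
    assert root_kib == 1048576   # matches the ExtendVirtualDisk target in the log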
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.144802] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.269s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.146310] env[62619]: INFO nova.compute.claims [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1804.324164] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 4374c102-a6fe-45ef-ad49-a1295f96899a] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1804.642099] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778356, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072403} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.642479] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1804.643343] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08264c09-250c-43d0-ab53-4943fcd1b6cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.651167] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dc55bdc2-efe1-4832-aa1c-6a10507d8446 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "88f79718-97d0-432c-8515-b60ab3dfd7e0" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 36.934s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.654627] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92db66a5-b6cf-4a0d-9cc6-896841eb9560 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "88f79718-97d0-432c-8515-b60ab3dfd7e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 15.635s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.654865] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92db66a5-b6cf-4a0d-9cc6-896841eb9560 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "88f79718-97d0-432c-8515-b60ab3dfd7e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.655110] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92db66a5-b6cf-4a0d-9cc6-896841eb9560 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "88f79718-97d0-432c-8515-b60ab3dfd7e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.655316] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92db66a5-b6cf-4a0d-9cc6-896841eb9560 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "88f79718-97d0-432c-8515-b60ab3dfd7e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.675368] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] cee0356c-25d7-48ca-be09-16b0e1b56a41/cee0356c-25d7-48ca-be09-16b0e1b56a41.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1804.675938] env[62619]: INFO nova.compute.manager [None req-92db66a5-b6cf-4a0d-9cc6-896841eb9560 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Terminating instance [ 1804.678186] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e35a876a-41a6-48ea-ac1b-0e4373790bdc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.701888] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1804.701888] env[62619]: value = "task-1778357" [ 1804.701888] env[62619]: _type = "Task" [ 1804.701888] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.711607] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778357, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.777038] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe6a5ea-9bb7-43f3-8fd2-53e096403ab9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.799016] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ba1332-0e09-43d3-be0c-0d80ca400ff9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.807408] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance '917960ca-3870-4e4e-aafe-3c6d77cf7c51' progress to 83 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1804.827146] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e81c03f7-9c0e-46bd-9641-aced82038eca] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1805.196074] env[62619]: DEBUG nova.compute.manager [None req-92db66a5-b6cf-4a0d-9cc6-896841eb9560 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1805.196444] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-92db66a5-b6cf-4a0d-9cc6-896841eb9560 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1805.196972] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51d0eb12-fc93-40de-9e83-60bad963a82d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.209986] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90fbd7e-f5d8-4c6f-ac3f-7ef97f7b9146 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.225528] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778357, 'name': ReconfigVM_Task, 'duration_secs': 0.332758} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.226416] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Reconfigured VM instance instance-00000061 to attach disk [datastore1] cee0356c-25d7-48ca-be09-16b0e1b56a41/cee0356c-25d7-48ca-be09-16b0e1b56a41.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1805.227098] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1069994a-79ef-4716-b3cd-96aa0666b75d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.234068] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1805.234068] env[62619]: value = "task-1778358" [ 1805.234068] env[62619]: _type = "Task" [ 1805.234068] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.250571] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-92db66a5-b6cf-4a0d-9cc6-896841eb9560 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 88f79718-97d0-432c-8515-b60ab3dfd7e0 could not be found. [ 1805.250920] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-92db66a5-b6cf-4a0d-9cc6-896841eb9560 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1805.251159] env[62619]: INFO nova.compute.manager [None req-92db66a5-b6cf-4a0d-9cc6-896841eb9560 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1805.251440] env[62619]: DEBUG oslo.service.loopingcall [None req-92db66a5-b6cf-4a0d-9cc6-896841eb9560 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1805.251835] env[62619]: DEBUG nova.compute.manager [-] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1805.252033] env[62619]: DEBUG nova.network.neutron [-] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1805.259357] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778358, 'name': Rename_Task} progress is 10%. 
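The WARNING "Instance does not exist on backend: nova.exception.InstanceNotFound" followed immediately by "Instance destroyed" and "Took 0.05 seconds to destroy" shows the destroy path being idempotent: a VM that is already gone from vCenter is treated as destroyed and the flow moves straight on to network deallocation. A hedged sketch of that shape, with hypothetical hypervisor and network helpers standing in for the real driver and Neutron API:

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def terminate(instance, hypervisor, network):
        try:
            hypervisor.destroy_vm(instance)           # may raise if the VM is gone
        except InstanceNotFound:
            # Already absent on the backend: warn and continue; the end state
            # ("destroyed") is the same either way.
            print(f"Instance {instance} does not exist on backend")
        network.deallocate_for_instance(instance)     # always clean up ports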
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.314026] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1805.314361] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fadbae4b-c8e5-4d14-bfc6-ed5f91771ae6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.322250] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1805.322250] env[62619]: value = "task-1778359" [ 1805.322250] env[62619]: _type = "Task" [ 1805.322250] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.331093] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: d16bebd1-a144-4d73-8eb6-8ab12a08fe69] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1805.333087] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778359, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.462167] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00428617-da21-4a33-9329-f2f820a12f16 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.471053] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e953cc-7445-4f0a-b9c3-2ae57ff8026a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.506829] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c4598e-28bd-40f3-8b30-59f2b8e78151 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.514514] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4b40f4-25b5-44db-8c6f-d9e60eb9fc13 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.534156] env[62619]: DEBUG nova.compute.provider_tree [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1805.744644] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778358, 'name': Rename_Task, 'duration_secs': 0.174092} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.746058] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1805.746058] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87b9f550-dd07-4212-8bfd-32cbc021871c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.752761] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1805.752761] env[62619]: value = "task-1778360" [ 1805.752761] env[62619]: _type = "Task" [ 1805.752761] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.760612] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778360, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.834427] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778359, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.836975] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e9ca5148-f188-4a15-83ae-8f3d730b0dab] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1806.021367] env[62619]: DEBUG nova.network.neutron [-] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1806.075080] env[62619]: DEBUG nova.scheduler.client.report [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 147 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1806.075423] env[62619]: DEBUG nova.compute.provider_tree [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 147 to 148 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1806.075617] env[62619]: DEBUG nova.compute.provider_tree [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1806.264082] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778360, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.334369] env[62619]: DEBUG oslo_vmware.api [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778359, 'name': PowerOnVM_Task, 'duration_secs': 0.608285} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.334825] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1806.335142] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-30e90256-4e9e-4791-abfa-cc73aaafeb20 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance '917960ca-3870-4e4e-aafe-3c6d77cf7c51' progress to 100 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1806.339898] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 312aed5b-a66e-4428-ac1b-483dc2b38291] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1806.524770] env[62619]: INFO nova.compute.manager [-] [instance: 88f79718-97d0-432c-8515-b60ab3dfd7e0] Took 1.27 seconds to deallocate network for instance. [ 1806.582102] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.437s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.583015] env[62619]: DEBUG nova.compute.manager [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1806.586843] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.598s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.589036] env[62619]: INFO nova.compute.claims [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1806.763990] env[62619]: DEBUG oslo_vmware.api [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778360, 'name': PowerOnVM_Task, 'duration_secs': 0.536938} completed successfully. 
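"Took 1.27 seconds to deallocate network for instance", together with the earlier "Waiting for function ..._deallocate_network_with_retries to return" looping-call line, points at a retry wrapper around Neutron port cleanup. The real code uses oslo.service's looping-call machinery; the sketch below is only a plain-Python approximation of retry-until-success with a bounded attempt count, and the names and limits are assumptions:

    import time

    def deallocate_network_with_retries(deallocate, attempts=3, delay=1.0):
        for attempt in range(1, attempts + 1):
            try:
                return deallocate()            # e.g. Neutron port deletion
            except Exception as exc:           # the real code narrows this down
                if attempt == attempts:
                    raise
                print(f"deallocation failed ({exc}); retrying ({attempt}/{attempts})")
                time.sleep(delay)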
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.764276] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1806.764514] env[62619]: INFO nova.compute.manager [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Took 6.80 seconds to spawn the instance on the hypervisor. [ 1806.764695] env[62619]: DEBUG nova.compute.manager [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1806.765495] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127a58f3-bf47-48b9-ab20-9d92346ee1c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.844462] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 4ee81568-ad9a-4ded-b6fe-15503d85968e] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1807.095079] env[62619]: DEBUG nova.compute.utils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1807.097884] env[62619]: DEBUG nova.compute.manager [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Allocating IP information in the background. 
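"Took 6.80 seconds to spawn the instance on the hypervisor" closes out the VMware spawn sequence this section walks through for instance cee0356c-25d7-48ca-be09-16b0e1b56a41: populate or reuse the datastore image cache, copy the cached VMDK into the instance folder, extend the root disk, reconfigure the VM to attach it, rename, and power on, each step surfacing as a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task). A compressed sketch of that ordering; the session object and its helpers are hypothetical stand-ins, not Nova's real driver API:

    def spawn(session, instance, image_id, root_kib):
        cache_vmdk = f"[datastore1] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        target_vmdk = f"[datastore1] {instance}/{instance}.vmdk"

        session.wait(session.copy_virtual_disk(cache_vmdk, target_vmdk))   # CopyVirtualDisk_Task
        session.wait(session.extend_virtual_disk(target_vmdk, root_kib))   # ExtendVirtualDisk_Task
        session.wait(session.attach_disk(instance, target_vmdk))           # ReconfigVM_Task
        session.wait(session.rename_vm(instance))                          # Rename_Task
        session.wait(session.power_on(instance))                           # PowerOnVM_Task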
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1807.098059] env[62619]: DEBUG nova.network.neutron [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1807.149534] env[62619]: DEBUG nova.policy [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c02533fb867d44eb88c0ec3cf3f3982b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d9cfa8d5b184320ae434919598191ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1807.286660] env[62619]: INFO nova.compute.manager [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Took 20.70 seconds to build instance. [ 1807.349704] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: fb231b38-950e-4c86-bfe5-4c10a304910f] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1807.430500] env[62619]: DEBUG nova.network.neutron [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Successfully created port: e127117d-d3f7-4878-bd28-c36eddba80b8 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1807.558514] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92db66a5-b6cf-4a0d-9cc6-896841eb9560 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "88f79718-97d0-432c-8515-b60ab3dfd7e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.904s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.598589] env[62619]: DEBUG nova.compute.manager [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Start building block device mappings for instance. 
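"Policy check for network:attach_external_network failed with credentials {...}" is an oslo.policy evaluation: the request context only carries the member and reader roles, so an admin-only rule evaluates to False and external networks are simply not attached. A hypothetical illustration of that evaluation; the rule name and project id come from the log, while the rule body ("role:admin") and the enforcer setup are assumptions:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault("network:attach_external_network", "role:admin"))

    creds = {"roles": ["member", "reader"],
             "project_id": "3d9cfa8d5b184320ae434919598191ef"}
    # Evaluates to False for these credentials, which is what the DEBUG line reports.
    print(enforcer.enforce("network:attach_external_network", {}, creds))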
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1807.788532] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c4cee1a-b676-4b56-a770-dc7d7affe872 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "cee0356c-25d7-48ca-be09-16b0e1b56a41" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.211s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.853361] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: ed34ae20-a891-45aa-8124-f36f264937f8] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1807.866559] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b9fa03-772a-457b-815f-ba8cb2a8c732 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.877508] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25fe27e8-8f82-44eb-8197-a57d95d0672d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.921028] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb24df0-f242-472b-9cf4-7d78be0d60a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.930955] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac8487e-d44b-4cd6-84e5-01122ca5f92b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.948758] env[62619]: DEBUG nova.compute.provider_tree [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1808.048041] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525d9bba-856a-e354-4a86-902fa420364f/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1808.049015] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151b564d-5805-42d9-8db0-313ecfbb3177 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.056213] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525d9bba-856a-e354-4a86-902fa420364f/disk-0.vmdk is in state: ready. 
{{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1808.056400] env[62619]: ERROR oslo_vmware.rw_handles [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525d9bba-856a-e354-4a86-902fa420364f/disk-0.vmdk due to incomplete transfer. [ 1808.056659] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0a9c7b51-b27d-4835-afb8-a18bfb1a7d71 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.066021] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525d9bba-856a-e354-4a86-902fa420364f/disk-0.vmdk. {{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1808.066250] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Uploaded image 304cf0af-24f3-4f16-8ae8-bda782d14051 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1808.068473] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1808.068760] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1ed9f983-5391-47ed-a65a-342e48bdfbec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.077384] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1808.077384] env[62619]: value = "task-1778361" [ 1808.077384] env[62619]: _type = "Task" [ 1808.077384] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.089359] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778361, 'name': Destroy_Task} progress is 0%. 
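The rw_handles lines above show the end-of-transfer handling for the NFC lease backing the VMDK export: the lease state is checked ("is in state: ready"), the lease is aborted because the handle cannot confirm a complete transfer, the read handle is closed, and only then is the image finalized in Glance. A rough sketch of that close-out decision; the Lease object and its methods are hypothetical stand-ins for the vSphere HttpNfcLease calls:

    def close_read_handle(lease, bytes_read, expected_size):
        # Mirrors the "Getting lease state" / "Aborting lease ... due to
        # incomplete transfer" sequence in the log.
        if lease.state == "ready":
            if expected_size and bytes_read >= expected_size:
                lease.complete()          # HttpNfcLeaseComplete
            else:
                lease.abort()             # HttpNfcLeaseAbort, as logged above
        lease.close_connection()          # finally close the HTTP read handle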
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.191877] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "1d40d434-fa8f-463e-908a-24c61538fe33" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.192124] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "1d40d434-fa8f-463e-908a-24c61538fe33" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.356918] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: aa4906f1-e801-4df0-819e-8c5fb5930fb5] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1808.451786] env[62619]: DEBUG nova.scheduler.client.report [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1808.591092] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778361, 'name': Destroy_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.611147] env[62619]: DEBUG nova.compute.manager [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1808.634839] env[62619]: DEBUG nova.virt.hardware [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1808.635107] env[62619]: DEBUG nova.virt.hardware [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1808.635292] env[62619]: DEBUG nova.virt.hardware [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1808.635458] env[62619]: DEBUG nova.virt.hardware [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1808.635605] env[62619]: DEBUG nova.virt.hardware [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1808.635759] env[62619]: DEBUG nova.virt.hardware [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1808.635958] env[62619]: DEBUG nova.virt.hardware [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1808.636134] env[62619]: DEBUG nova.virt.hardware [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1808.636303] 
env[62619]: DEBUG nova.virt.hardware [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1808.636466] env[62619]: DEBUG nova.virt.hardware [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1808.636637] env[62619]: DEBUG nova.virt.hardware [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1808.637581] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d72d59-8046-49d2-a692-c2e96a0ef3c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.647275] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7942c2a-d45b-428f-9fa9-9b585b119f29 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.694366] env[62619]: DEBUG nova.compute.manager [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1808.860611] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 4763e489-5aeb-4dc0-b327-b79a55afdfe3] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1808.874995] env[62619]: DEBUG nova.compute.manager [req-6a2d735e-046d-4b23-b9fc-1718b47c37c2 req-14662677-2f85-4d56-8afc-1274d95cb15a service nova] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Received event network-vif-plugged-e127117d-d3f7-4878-bd28-c36eddba80b8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1808.875250] env[62619]: DEBUG oslo_concurrency.lockutils [req-6a2d735e-046d-4b23-b9fc-1718b47c37c2 req-14662677-2f85-4d56-8afc-1274d95cb15a service nova] Acquiring lock "a250f05d-cd74-436d-b656-2a9e55527809-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.875464] env[62619]: DEBUG oslo_concurrency.lockutils [req-6a2d735e-046d-4b23-b9fc-1718b47c37c2 req-14662677-2f85-4d56-8afc-1274d95cb15a service nova] Lock "a250f05d-cd74-436d-b656-2a9e55527809-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.875632] env[62619]: DEBUG oslo_concurrency.lockutils [req-6a2d735e-046d-4b23-b9fc-1718b47c37c2 req-14662677-2f85-4d56-8afc-1274d95cb15a service nova] Lock "a250f05d-cd74-436d-b656-2a9e55527809-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.875867] env[62619]: DEBUG nova.compute.manager [req-6a2d735e-046d-4b23-b9fc-1718b47c37c2 req-14662677-2f85-4d56-8afc-1274d95cb15a service nova] [instance: a250f05d-cd74-436d-b656-2a9e55527809] No waiting events found dispatching network-vif-plugged-e127117d-d3f7-4878-bd28-c36eddba80b8 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1808.876196] env[62619]: WARNING nova.compute.manager [req-6a2d735e-046d-4b23-b9fc-1718b47c37c2 req-14662677-2f85-4d56-8afc-1274d95cb15a service nova] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Received unexpected event network-vif-plugged-e127117d-d3f7-4878-bd28-c36eddba80b8 for instance with vm_state building and task_state spawning. [ 1808.956844] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.370s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.957416] env[62619]: DEBUG nova.compute.manager [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1808.965394] env[62619]: DEBUG oslo_concurrency.lockutils [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.876s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.965647] env[62619]: DEBUG nova.objects.instance [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Lazy-loading 'resources' on Instance uuid f3345332-5a22-4a1c-ac74-4e8f2ceb3f15 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1808.978184] env[62619]: DEBUG nova.network.neutron [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Successfully updated port: e127117d-d3f7-4878-bd28-c36eddba80b8 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1809.040224] env[62619]: DEBUG nova.network.neutron [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Port 5911524f-a8b5-4591-a312-ea0cefac24df binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1809.040511] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.040733] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.040953] env[62619]: DEBUG nova.network.neutron [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1809.088636] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778361, 'name': Destroy_Task, 'duration_secs': 0.945465} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.088902] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Destroyed the VM [ 1809.089155] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1809.089418] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e043763f-641f-4a61-b524-afa9d1d1a888 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.098035] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1809.098035] env[62619]: value = "task-1778362" [ 1809.098035] env[62619]: _type = "Task" [ 1809.098035] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.106586] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778362, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.217837] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.365313] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: ac03bcf3-61df-4557-8018-0ad54ef30f17] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1809.468981] env[62619]: DEBUG nova.compute.utils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1809.470569] env[62619]: DEBUG nova.compute.manager [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1809.471224] env[62619]: DEBUG nova.network.neutron [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1809.482785] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "refresh_cache-a250f05d-cd74-436d-b656-2a9e55527809" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.482785] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquired lock "refresh_cache-a250f05d-cd74-436d-b656-2a9e55527809" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.482785] env[62619]: DEBUG nova.network.neutron [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1809.523954] env[62619]: DEBUG nova.policy [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c02533fb867d44eb88c0ec3cf3f3982b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d9cfa8d5b184320ae434919598191ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1809.607844] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778362, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.736041] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83458e4b-a5ff-4900-9cf1-68d29195cdc1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.746392] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987417e7-0cd8-4288-9c92-033cf4042ee5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.784128] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6addc32-c014-450a-b60d-8a55479c047d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.789075] env[62619]: DEBUG nova.network.neutron [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance_info_cache with network_info: [{"id": "5911524f-a8b5-4591-a312-ea0cefac24df", "address": "fa:16:3e:bd:c4:f9", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5911524f-a8", "ovs_interfaceid": "5911524f-a8b5-4591-a312-ea0cefac24df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1809.797469] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e228920b-7ebb-4bcc-982a-6db452dc67fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.819754] env[62619]: DEBUG nova.compute.provider_tree [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1809.869223] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1809.869385] 
env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Cleaning up deleted instances with incomplete migration {{(pid=62619) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11647}} [ 1809.914892] env[62619]: DEBUG nova.network.neutron [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Successfully created port: 67553e4f-60e3-4b66-acde-8a299ea8545b {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1809.976473] env[62619]: DEBUG nova.compute.manager [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1810.017217] env[62619]: DEBUG nova.network.neutron [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1810.108451] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778362, 'name': RemoveSnapshot_Task} progress is 98%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.149769] env[62619]: DEBUG nova.network.neutron [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Updating instance_info_cache with network_info: [{"id": "e127117d-d3f7-4878-bd28-c36eddba80b8", "address": "fa:16:3e:57:64:1c", "network": {"id": "91d6666a-8674-4957-8cbe-8e730e5c0741", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1893713214-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9cfa8d5b184320ae434919598191ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape127117d-d3", "ovs_interfaceid": "e127117d-d3f7-4878-bd28-c36eddba80b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.297490] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 
tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.323335] env[62619]: DEBUG nova.scheduler.client.report [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1810.372019] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1810.609942] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778362, 'name': RemoveSnapshot_Task} progress is 98%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.652475] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Releasing lock "refresh_cache-a250f05d-cd74-436d-b656-2a9e55527809" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.652809] env[62619]: DEBUG nova.compute.manager [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Instance network_info: |[{"id": "e127117d-d3f7-4878-bd28-c36eddba80b8", "address": "fa:16:3e:57:64:1c", "network": {"id": "91d6666a-8674-4957-8cbe-8e730e5c0741", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1893713214-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9cfa8d5b184320ae434919598191ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape127117d-d3", "ovs_interfaceid": "e127117d-d3f7-4878-bd28-c36eddba80b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1810.653271] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:64:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '030ecc21-dc1c-4283-854e-88e623b3970a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e127117d-d3f7-4878-bd28-c36eddba80b8', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1810.660981] env[62619]: DEBUG oslo.service.loopingcall [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1810.661197] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1810.661415] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc3f4de7-8b9c-4a44-ad1f-fd9afbbfda67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.681096] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1810.681096] env[62619]: value = "task-1778363" [ 1810.681096] env[62619]: _type = "Task" [ 1810.681096] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.688780] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778363, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.801206] env[62619]: DEBUG nova.compute.manager [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62619) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1810.828060] env[62619]: DEBUG oslo_concurrency.lockutils [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.863s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.830299] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.610s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.830507] env[62619]: DEBUG nova.objects.instance [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Lazy-loading 'resources' on Instance uuid 32aed8cd-1583-4253-bfb6-a98610e2f32e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1810.850946] env[62619]: INFO nova.scheduler.client.report [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Deleted allocations for instance f3345332-5a22-4a1c-ac74-4e8f2ceb3f15 [ 1810.986165] env[62619]: DEBUG nova.compute.manager [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1811.015717] env[62619]: DEBUG nova.compute.manager [req-c1530026-ff8d-48d5-8552-47bbcb1fd5b9 req-3f1e2e34-5e75-46c5-8c3b-9cf9fd34f869 service nova] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Received event network-changed-e127117d-d3f7-4878-bd28-c36eddba80b8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1811.016234] env[62619]: DEBUG nova.compute.manager [req-c1530026-ff8d-48d5-8552-47bbcb1fd5b9 req-3f1e2e34-5e75-46c5-8c3b-9cf9fd34f869 service nova] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Refreshing instance network info cache due to event network-changed-e127117d-d3f7-4878-bd28-c36eddba80b8. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1811.016234] env[62619]: DEBUG oslo_concurrency.lockutils [req-c1530026-ff8d-48d5-8552-47bbcb1fd5b9 req-3f1e2e34-5e75-46c5-8c3b-9cf9fd34f869 service nova] Acquiring lock "refresh_cache-a250f05d-cd74-436d-b656-2a9e55527809" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.016359] env[62619]: DEBUG oslo_concurrency.lockutils [req-c1530026-ff8d-48d5-8552-47bbcb1fd5b9 req-3f1e2e34-5e75-46c5-8c3b-9cf9fd34f869 service nova] Acquired lock "refresh_cache-a250f05d-cd74-436d-b656-2a9e55527809" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.016488] env[62619]: DEBUG nova.network.neutron [req-c1530026-ff8d-48d5-8552-47bbcb1fd5b9 req-3f1e2e34-5e75-46c5-8c3b-9cf9fd34f869 service nova] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Refreshing network info cache for port e127117d-d3f7-4878-bd28-c36eddba80b8 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1811.019971] env[62619]: DEBUG nova.virt.hardware [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1811.020191] env[62619]: DEBUG nova.virt.hardware [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1811.020379] env[62619]: DEBUG nova.virt.hardware [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1811.020571] env[62619]: DEBUG nova.virt.hardware [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1811.020711] env[62619]: DEBUG nova.virt.hardware [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1811.020854] env[62619]: DEBUG nova.virt.hardware [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 
tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1811.021067] env[62619]: DEBUG nova.virt.hardware [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1811.021223] env[62619]: DEBUG nova.virt.hardware [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1811.021386] env[62619]: DEBUG nova.virt.hardware [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1811.021557] env[62619]: DEBUG nova.virt.hardware [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1811.021770] env[62619]: DEBUG nova.virt.hardware [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1811.022882] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5ad8a4-4dc7-418b-bf9d-c8f94a499a41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.033168] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9850fdde-8046-4f78-8fc9-0eeeb5ea57f7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.109684] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778362, 'name': RemoveSnapshot_Task} progress is 98%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.191466] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778363, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.338049] env[62619]: DEBUG nova.compute.manager [req-05a5f7c8-f9e3-45bf-a799-8189d4821e7e req-2f452cba-6b93-4af0-9153-95e607f74e05 service nova] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Received event network-vif-plugged-67553e4f-60e3-4b66-acde-8a299ea8545b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1811.339027] env[62619]: DEBUG oslo_concurrency.lockutils [req-05a5f7c8-f9e3-45bf-a799-8189d4821e7e req-2f452cba-6b93-4af0-9153-95e607f74e05 service nova] Acquiring lock "d3aa352b-7d2d-416e-a579-9636619bb025-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.339027] env[62619]: DEBUG oslo_concurrency.lockutils [req-05a5f7c8-f9e3-45bf-a799-8189d4821e7e req-2f452cba-6b93-4af0-9153-95e607f74e05 service nova] Lock "d3aa352b-7d2d-416e-a579-9636619bb025-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.339027] env[62619]: DEBUG oslo_concurrency.lockutils [req-05a5f7c8-f9e3-45bf-a799-8189d4821e7e req-2f452cba-6b93-4af0-9153-95e607f74e05 service nova] Lock "d3aa352b-7d2d-416e-a579-9636619bb025-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.339027] env[62619]: DEBUG nova.compute.manager [req-05a5f7c8-f9e3-45bf-a799-8189d4821e7e req-2f452cba-6b93-4af0-9153-95e607f74e05 service nova] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] No waiting events found dispatching network-vif-plugged-67553e4f-60e3-4b66-acde-8a299ea8545b {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1811.339027] env[62619]: WARNING nova.compute.manager [req-05a5f7c8-f9e3-45bf-a799-8189d4821e7e req-2f452cba-6b93-4af0-9153-95e607f74e05 service nova] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Received unexpected event network-vif-plugged-67553e4f-60e3-4b66-acde-8a299ea8545b for instance with vm_state building and task_state spawning. 
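The "Build topologies for 1 vcpu(s) 1:1:1 ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" entries above record Nova enumerating CPU topologies for the 1-vCPU m1.nano/m1.micro flavors. A minimal standalone sketch of that enumeration, assuming the simplified rule that every (sockets, cores, threads) factorisation of the vCPU count within the logged limits is a candidate (an illustration only, not Nova's actual nova.virt.hardware implementation):

# Minimal sketch, not Nova's code: list every (sockets, cores, threads)
# combination whose product equals the vCPU count and which stays within
# the limits logged above (65536 each when neither flavor nor image
# constrains the topology).
def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append((sockets, cores, threads))
    return found

# For a 1-vCPU flavor this yields a single candidate, matching the
# "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" line.
print(possible_cpu_topologies(1))  # [(1, 1, 1)]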
[ 1811.359614] env[62619]: DEBUG oslo_concurrency.lockutils [None req-73728156-f2c1-46ef-9012-eeb5c4053b93 tempest-ServersTestFqdnHostnames-2141079200 tempest-ServersTestFqdnHostnames-2141079200-project-member] Lock "f3345332-5a22-4a1c-ac74-4e8f2ceb3f15" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.371s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.362697] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1811.363105] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1811.363264] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1811.363642] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1811.442137] env[62619]: DEBUG nova.network.neutron [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Successfully updated port: 67553e4f-60e3-4b66-acde-8a299ea8545b {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1811.576706] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457e5b6e-4fb5-4791-b680-2f862cf013c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.586210] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c38e52f-ad91-4e40-8d31-d4b7c5708a0b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.623903] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4849b9-fb47-4b0c-8f32-bd6da97f2f37 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.632392] env[62619]: DEBUG oslo_vmware.api [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778362, 'name': RemoveSnapshot_Task, 'duration_secs': 2.107913} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.636254] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1811.636254] env[62619]: INFO nova.compute.manager [None req-b2152c33-5b78-4b2e-9623-c0dde05afed6 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Took 17.46 seconds to snapshot the instance on the hypervisor. [ 1811.638437] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196d525c-8abb-4380-bc58-48b99df66551 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.653908] env[62619]: DEBUG nova.compute.provider_tree [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1811.691624] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778363, 'name': CreateVM_Task, 'duration_secs': 0.63857} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.691771] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1811.692462] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.692581] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.692861] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1811.693117] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f50904c6-4ec6-44ff-aa7f-fe8feb581d85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.698340] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a 
tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1811.698340] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fe13e7-493c-bf38-12a9-0f102ab3603c" [ 1811.698340] env[62619]: _type = "Task" [ 1811.698340] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.706136] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fe13e7-493c-bf38-12a9-0f102ab3603c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.869022] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10322}} [ 1811.869022] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10322}} [ 1811.907762] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "refresh_cache-006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.908131] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquired lock "refresh_cache-006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.908435] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Forcefully refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1811.908771] env[62619]: DEBUG nova.objects.instance [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lazy-loading 'info_cache' on Instance uuid 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1811.928761] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.949283] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "refresh_cache-d3aa352b-7d2d-416e-a579-9636619bb025" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.949283] env[62619]: DEBUG oslo_concurrency.lockutils [None 
req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquired lock "refresh_cache-d3aa352b-7d2d-416e-a579-9636619bb025" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.949283] env[62619]: DEBUG nova.network.neutron [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1812.092084] env[62619]: DEBUG nova.network.neutron [req-c1530026-ff8d-48d5-8552-47bbcb1fd5b9 req-3f1e2e34-5e75-46c5-8c3b-9cf9fd34f869 service nova] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Updated VIF entry in instance network info cache for port e127117d-d3f7-4878-bd28-c36eddba80b8. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1812.092480] env[62619]: DEBUG nova.network.neutron [req-c1530026-ff8d-48d5-8552-47bbcb1fd5b9 req-3f1e2e34-5e75-46c5-8c3b-9cf9fd34f869 service nova] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Updating instance_info_cache with network_info: [{"id": "e127117d-d3f7-4878-bd28-c36eddba80b8", "address": "fa:16:3e:57:64:1c", "network": {"id": "91d6666a-8674-4957-8cbe-8e730e5c0741", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1893713214-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9cfa8d5b184320ae434919598191ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape127117d-d3", "ovs_interfaceid": "e127117d-d3f7-4878-bd28-c36eddba80b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1812.157054] env[62619]: DEBUG nova.scheduler.client.report [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1812.213028] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 
tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52fe13e7-493c-bf38-12a9-0f102ab3603c, 'name': SearchDatastore_Task, 'duration_secs': 0.012168} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.213771] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.213771] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1812.213969] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.214195] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.214412] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1812.215121] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-682f1631-b856-4f6c-a355-8a45f51dffea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.229369] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1812.229868] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1812.231182] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2905447e-03ba-4afa-a50f-d7332d3c63e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.238645] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1812.238645] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523446e9-4f90-b3e2-d51e-d780807743fe" [ 1812.238645] env[62619]: _type = "Task" [ 1812.238645] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.247960] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523446e9-4f90-b3e2-d51e-d780807743fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.486126] env[62619]: DEBUG nova.network.neutron [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1812.595362] env[62619]: DEBUG oslo_concurrency.lockutils [req-c1530026-ff8d-48d5-8552-47bbcb1fd5b9 req-3f1e2e34-5e75-46c5-8c3b-9cf9fd34f869 service nova] Releasing lock "refresh_cache-a250f05d-cd74-436d-b656-2a9e55527809" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.640691] env[62619]: DEBUG nova.network.neutron [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Updating instance_info_cache with network_info: [{"id": "67553e4f-60e3-4b66-acde-8a299ea8545b", "address": "fa:16:3e:e6:db:71", "network": {"id": "91d6666a-8674-4957-8cbe-8e730e5c0741", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1893713214-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9cfa8d5b184320ae434919598191ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67553e4f-60", "ovs_interfaceid": "67553e4f-60e3-4b66-acde-8a299ea8545b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1812.664130] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.833s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.667985] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.808s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.670796] env[62619]: INFO nova.compute.claims [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1812.700933] env[62619]: INFO nova.scheduler.client.report [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Deleted allocations for instance 32aed8cd-1583-4253-bfb6-a98610e2f32e [ 1812.750823] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523446e9-4f90-b3e2-d51e-d780807743fe, 'name': SearchDatastore_Task, 'duration_secs': 0.03237} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.752309] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-535e985a-42fb-40bb-8438-9cf4374b3b85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.757958] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1812.757958] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527150a2-cd9f-6d21-c547-757a713ea655" [ 1812.757958] env[62619]: _type = "Task" [ 1812.757958] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.765978] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527150a2-cd9f-6d21-c547-757a713ea655, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.825765] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Acquiring lock "e19650da-cc3d-4350-be3e-dc776ce68206" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.826035] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Lock "e19650da-cc3d-4350-be3e-dc776ce68206" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.826246] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Acquiring lock "e19650da-cc3d-4350-be3e-dc776ce68206-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.826431] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Lock "e19650da-cc3d-4350-be3e-dc776ce68206-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.826600] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Lock "e19650da-cc3d-4350-be3e-dc776ce68206-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.828814] env[62619]: INFO nova.compute.manager [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Terminating instance [ 1813.144148] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Releasing lock "refresh_cache-d3aa352b-7d2d-416e-a579-9636619bb025" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.146162] env[62619]: DEBUG nova.compute.manager [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Instance network_info: |[{"id": "67553e4f-60e3-4b66-acde-8a299ea8545b", "address": "fa:16:3e:e6:db:71", "network": {"id": "91d6666a-8674-4957-8cbe-8e730e5c0741", "bridge": "br-int", "label": 
"tempest-ListServerFiltersTestJSON-1893713214-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9cfa8d5b184320ae434919598191ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67553e4f-60", "ovs_interfaceid": "67553e4f-60e3-4b66-acde-8a299ea8545b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1813.146162] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:db:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '030ecc21-dc1c-4283-854e-88e623b3970a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67553e4f-60e3-4b66-acde-8a299ea8545b', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1813.153636] env[62619]: DEBUG oslo.service.loopingcall [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1813.153636] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1813.153829] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0889674-2141-4159-b7b6-a3d2c97e67a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.175262] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1813.175262] env[62619]: value = "task-1778364" [ 1813.175262] env[62619]: _type = "Task" [ 1813.175262] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.186800] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778364, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.213437] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b6efc20-8b7f-4cdd-9416-a59f73473752 tempest-AttachInterfacesUnderV243Test-1665821301 tempest-AttachInterfacesUnderV243Test-1665821301-project-member] Lock "32aed8cd-1583-4253-bfb6-a98610e2f32e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.611s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.270826] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527150a2-cd9f-6d21-c547-757a713ea655, 'name': SearchDatastore_Task, 'duration_secs': 0.011767} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.271108] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.271365] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a250f05d-cd74-436d-b656-2a9e55527809/a250f05d-cd74-436d-b656-2a9e55527809.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1813.271628] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-114c70b8-2b82-46a4-9425-c6d4d89d20d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.278572] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1813.278572] env[62619]: value = "task-1778365" [ 1813.278572] env[62619]: _type = "Task" [ 1813.278572] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.286958] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778365, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.332567] env[62619]: DEBUG nova.compute.manager [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1813.332804] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1813.333827] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0635f4-b622-43e0-b261-433cd1e2ae10 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.342649] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1813.342649] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf321a52-b4e3-4c1d-a4cb-2fe959dab481 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.348110] env[62619]: DEBUG oslo_vmware.api [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1813.348110] env[62619]: value = "task-1778366" [ 1813.348110] env[62619]: _type = "Task" [ 1813.348110] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.356220] env[62619]: DEBUG oslo_vmware.api [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778366, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.367409] env[62619]: DEBUG nova.compute.manager [req-0df05dcf-f033-4598-a404-cb90f9cbe043 req-befc8c42-043d-4565-b909-636e11abacb2 service nova] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Received event network-changed-67553e4f-60e3-4b66-acde-8a299ea8545b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1813.367475] env[62619]: DEBUG nova.compute.manager [req-0df05dcf-f033-4598-a404-cb90f9cbe043 req-befc8c42-043d-4565-b909-636e11abacb2 service nova] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Refreshing instance network info cache due to event network-changed-67553e4f-60e3-4b66-acde-8a299ea8545b. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1813.367795] env[62619]: DEBUG oslo_concurrency.lockutils [req-0df05dcf-f033-4598-a404-cb90f9cbe043 req-befc8c42-043d-4565-b909-636e11abacb2 service nova] Acquiring lock "refresh_cache-d3aa352b-7d2d-416e-a579-9636619bb025" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1813.368050] env[62619]: DEBUG oslo_concurrency.lockutils [req-0df05dcf-f033-4598-a404-cb90f9cbe043 req-befc8c42-043d-4565-b909-636e11abacb2 service nova] Acquired lock "refresh_cache-d3aa352b-7d2d-416e-a579-9636619bb025" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1813.368214] env[62619]: DEBUG nova.network.neutron [req-0df05dcf-f033-4598-a404-cb90f9cbe043 req-befc8c42-043d-4565-b909-636e11abacb2 service nova] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Refreshing network info cache for port 67553e4f-60e3-4b66-acde-8a299ea8545b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1813.695137] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778364, 'name': CreateVM_Task, 'duration_secs': 0.494863} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.699097] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1813.700719] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1813.700719] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1813.701076] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1813.701624] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b27e37dd-e39d-4347-8f0a-ab8b1cf179df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.708350] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1813.708350] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5244925e-242f-91f4-d9a1-cb32e79c7c19" [ 1813.708350] env[62619]: _type = "Task" [ 1813.708350] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.728506] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5244925e-242f-91f4-d9a1-cb32e79c7c19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.761359] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Updating instance_info_cache with network_info: [{"id": "28e9da04-af12-4a21-b4ee-408c492669ef", "address": "fa:16:3e:00:1b:cf", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28e9da04-af", "ovs_interfaceid": "28e9da04-af12-4a21-b4ee-408c492669ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.794616] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778365, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.862797] env[62619]: DEBUG oslo_vmware.api [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778366, 'name': PowerOffVM_Task, 'duration_secs': 0.232675} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.863113] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1813.863291] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1813.863557] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-45d04542-4356-41d1-8f35-a6e6574d8033 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.998449] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e3db99-4b8e-407f-8fd8-6775e6aa69b9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.006664] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7722d89f-d1f0-4566-a2c3-4fd0b604fa4a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.043308] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09900c2f-5b9f-4ead-a3ab-e05a99b096de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.053573] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f81d741-ec73-48fa-b486-b1a2eebd9a7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.060780] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1814.060941] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1814.061099] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Deleting the datastore file [datastore1] e19650da-cc3d-4350-be3e-dc776ce68206 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1814.061414] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ab15e5c2-8e69-4a94-9c88-a8a3622bce55 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.071530] env[62619]: DEBUG nova.compute.provider_tree [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1814.077612] env[62619]: DEBUG oslo_vmware.api [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for the task: (returnval){ [ 1814.077612] env[62619]: value = "task-1778368" [ 1814.077612] env[62619]: _type = "Task" [ 1814.077612] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.090154] env[62619]: DEBUG oslo_vmware.api [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778368, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.213788] env[62619]: DEBUG nova.network.neutron [req-0df05dcf-f033-4598-a404-cb90f9cbe043 req-befc8c42-043d-4565-b909-636e11abacb2 service nova] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Updated VIF entry in instance network info cache for port 67553e4f-60e3-4b66-acde-8a299ea8545b. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1814.214163] env[62619]: DEBUG nova.network.neutron [req-0df05dcf-f033-4598-a404-cb90f9cbe043 req-befc8c42-043d-4565-b909-636e11abacb2 service nova] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Updating instance_info_cache with network_info: [{"id": "67553e4f-60e3-4b66-acde-8a299ea8545b", "address": "fa:16:3e:e6:db:71", "network": {"id": "91d6666a-8674-4957-8cbe-8e730e5c0741", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1893713214-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9cfa8d5b184320ae434919598191ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67553e4f-60", "ovs_interfaceid": "67553e4f-60e3-4b66-acde-8a299ea8545b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1814.223051] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5244925e-242f-91f4-d9a1-cb32e79c7c19, 'name': SearchDatastore_Task, 
'duration_secs': 0.055962} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.224641] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.224641] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1814.224641] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.224641] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.224641] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1814.224641] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f6560cf-cdf5-43c8-86e6-ad048ef3ccf8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.234791] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1814.234980] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1814.235763] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e48c960-9063-440f-8a4d-a8ef05a20c3a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.242671] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1814.242671] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52380b2f-ae45-f22a-8f74-a3a09c2771de" [ 1814.242671] env[62619]: _type = "Task" [ 1814.242671] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.251635] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52380b2f-ae45-f22a-8f74-a3a09c2771de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.266929] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Releasing lock "refresh_cache-006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.267111] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 1814.267362] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1814.267557] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1814.267709] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1814.267852] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_power_states {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1814.295614] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778365, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571498} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.297069] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a250f05d-cd74-436d-b656-2a9e55527809/a250f05d-cd74-436d-b656-2a9e55527809.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1814.297363] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1814.297623] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9e5f7e4e-e987-4049-9673-0aeaee36a4a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.306373] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1814.306373] env[62619]: value = "task-1778369" [ 1814.306373] env[62619]: _type = "Task" [ 1814.306373] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.315166] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778369, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.575225] env[62619]: DEBUG nova.scheduler.client.report [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1814.593794] env[62619]: DEBUG oslo_vmware.api [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Task: {'id': task-1778368, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.327999} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.594081] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1814.594273] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1814.594542] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1814.594630] env[62619]: INFO nova.compute.manager [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1814.594868] env[62619]: DEBUG oslo.service.loopingcall [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1814.595077] env[62619]: DEBUG nova.compute.manager [-] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1814.595171] env[62619]: DEBUG nova.network.neutron [-] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1814.717689] env[62619]: DEBUG oslo_concurrency.lockutils [req-0df05dcf-f033-4598-a404-cb90f9cbe043 req-befc8c42-043d-4565-b909-636e11abacb2 service nova] Releasing lock "refresh_cache-d3aa352b-7d2d-416e-a579-9636619bb025" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.754804] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52380b2f-ae45-f22a-8f74-a3a09c2771de, 'name': SearchDatastore_Task, 'duration_secs': 0.05687} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.755651] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5143cb52-bb03-43d0-b309-686a1f4caa36 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.760861] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1814.760861] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52af654c-fe66-7813-b7af-e1cfd5d5c075" [ 1814.760861] env[62619]: _type = "Task" [ 1814.760861] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.770112] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52af654c-fe66-7813-b7af-e1cfd5d5c075, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.776058] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Getting list of instances from cluster (obj){ [ 1814.776058] env[62619]: value = "domain-c8" [ 1814.776058] env[62619]: _type = "ClusterComputeResource" [ 1814.776058] env[62619]: } {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1814.776058] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf5e2f4-23a8-4cf2-bd84-a318d14e6833 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.794317] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Got total of 10 instances {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1814.794482] env[62619]: WARNING nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] While synchronizing instance power states, found 13 instances in the database and 10 instances on the hypervisor. 
[ 1814.794624] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Triggering sync for uuid 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2 {{(pid=62619) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1814.794814] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Triggering sync for uuid e32cb991-a018-4b55-8cdf-378e212c8434 {{(pid=62619) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1814.794964] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Triggering sync for uuid 5cf7ca57-351f-48ab-8758-b30f50cd607f {{(pid=62619) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1814.795130] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Triggering sync for uuid 917960ca-3870-4e4e-aafe-3c6d77cf7c51 {{(pid=62619) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1814.795278] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Triggering sync for uuid 8745aa7f-9848-4320-94b5-08b7e3bccf80 {{(pid=62619) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1814.795424] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Triggering sync for uuid e302e431-1f95-4ab5-bfca-59450fd887f0 {{(pid=62619) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1814.795571] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Triggering sync for uuid e19650da-cc3d-4350-be3e-dc776ce68206 {{(pid=62619) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1814.795710] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Triggering sync for uuid 52b29fef-eab6-4541-a570-af9c0c021a75 {{(pid=62619) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1814.795851] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Triggering sync for uuid 0987b6ec-2587-4f15-adbb-f563e19ecce9 {{(pid=62619) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1814.796122] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Triggering sync for uuid cee0356c-25d7-48ca-be09-16b0e1b56a41 {{(pid=62619) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1814.796228] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Triggering sync for uuid a250f05d-cd74-436d-b656-2a9e55527809 {{(pid=62619) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1814.796306] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Triggering sync for uuid d3aa352b-7d2d-416e-a579-9636619bb025 {{(pid=62619) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1814.796443] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Triggering sync for uuid 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e {{(pid=62619) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1814.796794] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.797020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.797289] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "e32cb991-a018-4b55-8cdf-378e212c8434" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.797494] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "e32cb991-a018-4b55-8cdf-378e212c8434" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.797725] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.797899] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.799225] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.799225] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.799225] env[62619]: INFO nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] During sync_power_state the instance has a pending task (resize_reverting). Skip. 
[ 1814.799225] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.799225] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "8745aa7f-9848-4320-94b5-08b7e3bccf80" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.799418] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "8745aa7f-9848-4320-94b5-08b7e3bccf80" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.799596] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "e302e431-1f95-4ab5-bfca-59450fd887f0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.799778] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "e302e431-1f95-4ab5-bfca-59450fd887f0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.800025] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "e19650da-cc3d-4350-be3e-dc776ce68206" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.800201] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "52b29fef-eab6-4541-a570-af9c0c021a75" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.800366] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "52b29fef-eab6-4541-a570-af9c0c021a75" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.800573] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "0987b6ec-2587-4f15-adbb-f563e19ecce9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.800763] env[62619]: DEBUG oslo_concurrency.lockutils [None 
req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "cee0356c-25d7-48ca-be09-16b0e1b56a41" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.800927] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "cee0356c-25d7-48ca-be09-16b0e1b56a41" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.801153] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "a250f05d-cd74-436d-b656-2a9e55527809" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.801344] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "d3aa352b-7d2d-416e-a579-9636619bb025" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.801525] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.802856] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10eaf9b1-2c8b-455f-9ae0-5400b180e071 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.805952] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5618f9-ece8-4d92-b7f5-7948d1157476 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.808946] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c947438-1d60-4618-9fc1-dd0171b9d727 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.811748] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9776090c-435a-41fb-be0e-167321d61bbe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.814401] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e528ae43-9de9-427d-a781-2a53154b1905 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.816932] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64de4670-7cd1-452c-84a8-65fb94210c38 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.819979] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb0da98-cd95-44d4-92b5-ddf7a3a91de9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.857646] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778369, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078902} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.858578] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1814.859707] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deee801e-6d86-4ab5-86a9-8524cf9a9951 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.886165] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] a250f05d-cd74-436d-b656-2a9e55527809/a250f05d-cd74-436d-b656-2a9e55527809.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1814.886857] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eabd439c-d3a2-48af-b620-b3d6afd5f9cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.909073] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1814.909073] env[62619]: value = "task-1778370" [ 1814.909073] env[62619]: _type = "Task" [ 1814.909073] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.917470] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778370, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.084265] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.418s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.084887] env[62619]: DEBUG nova.compute.manager [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1815.088666] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.749s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.090229] env[62619]: INFO nova.compute.claims [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1815.275937] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52af654c-fe66-7813-b7af-e1cfd5d5c075, 'name': SearchDatastore_Task, 'duration_secs': 0.026303} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.276321] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.276648] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] d3aa352b-7d2d-416e-a579-9636619bb025/d3aa352b-7d2d-416e-a579-9636619bb025.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1815.276973] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-336a91c4-1ab4-4781-979e-e6fad2573218 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.283289] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1815.283289] env[62619]: value = "task-1778371" [ 1815.283289] env[62619]: _type = "Task" [ 1815.283289] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.290882] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778371, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.348856] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "52b29fef-eab6-4541-a570-af9c0c021a75" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.548s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.350083] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "e302e431-1f95-4ab5-bfca-59450fd887f0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.549s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.350083] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.553s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.350083] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "e32cb991-a018-4b55-8cdf-378e212c8434" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.552s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.350239] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.552s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.360114] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "cee0356c-25d7-48ca-be09-16b0e1b56a41" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.559s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.392749] env[62619]: DEBUG nova.compute.manager [req-8121f5af-0dff-4e5f-86ea-97b646c13299 req-ce189b35-d1d7-4946-adc3-45f106ff1813 service nova] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Received event network-vif-deleted-0a3f2ddf-2344-4713-a719-025a5945f591 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1815.392981] env[62619]: INFO nova.compute.manager [req-8121f5af-0dff-4e5f-86ea-97b646c13299 req-ce189b35-d1d7-4946-adc3-45f106ff1813 service nova] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Neutron deleted interface 0a3f2ddf-2344-4713-a719-025a5945f591; detaching it from the instance and deleting it from the info cache [ 1815.393175] env[62619]: DEBUG nova.network.neutron [req-8121f5af-0dff-4e5f-86ea-97b646c13299 req-ce189b35-d1d7-4946-adc3-45f106ff1813 service nova] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1815.402433] env[62619]: DEBUG 
oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "8745aa7f-9848-4320-94b5-08b7e3bccf80" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.603s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.421517] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778370, 'name': ReconfigVM_Task, 'duration_secs': 0.337203} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.422562] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Reconfigured VM instance instance-00000062 to attach disk [datastore1] a250f05d-cd74-436d-b656-2a9e55527809/a250f05d-cd74-436d-b656-2a9e55527809.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1815.422562] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7400d50e-e332-477e-8441-1f6be938b779 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.433501] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1815.433501] env[62619]: value = "task-1778372" [ 1815.433501] env[62619]: _type = "Task" [ 1815.433501] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.446396] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778372, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.594952] env[62619]: DEBUG nova.compute.utils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1815.601853] env[62619]: DEBUG nova.compute.manager [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1815.608245] env[62619]: DEBUG nova.network.neutron [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1815.613205] env[62619]: DEBUG nova.network.neutron [-] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1815.694178] env[62619]: DEBUG nova.policy [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8d937f303584c3daea133a6283fd5a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23d77e73a09d492695fbfe6ac2c93371', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1815.710094] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1815.710470] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1815.793898] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778371, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.898757] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-31b94774-0616-4335-bc89-a6283ac4ce1e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.906706] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07392d87-2ad4-4389-ae6a-b0c858c9b31c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.952032] env[62619]: DEBUG nova.compute.manager [req-8121f5af-0dff-4e5f-86ea-97b646c13299 req-ce189b35-d1d7-4946-adc3-45f106ff1813 service nova] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Detach interface failed, port_id=0a3f2ddf-2344-4713-a719-025a5945f591, reason: Instance e19650da-cc3d-4350-be3e-dc776ce68206 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1815.958487] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778372, 'name': Rename_Task, 'duration_secs': 0.247392} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.958903] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1815.959190] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca1312b6-a8c7-412c-991a-d394a6a75816 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.967645] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1815.967645] env[62619]: value = "task-1778373" [ 1815.967645] env[62619]: _type = "Task" [ 1815.967645] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.976723] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778373, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.111872] env[62619]: DEBUG nova.compute.manager [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1816.116373] env[62619]: INFO nova.compute.manager [-] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Took 1.52 seconds to deallocate network for instance. [ 1816.270274] env[62619]: DEBUG nova.network.neutron [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Successfully created port: 0364c1be-595c-4984-9173-39fd5163c9ad {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1816.302185] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778371, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534003} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.302446] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] d3aa352b-7d2d-416e-a579-9636619bb025/d3aa352b-7d2d-416e-a579-9636619bb025.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1816.302662] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1816.302915] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-37236152-9aca-4b1a-9140-1604c65e8a49 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.310443] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1816.310443] env[62619]: value = "task-1778374" [ 1816.310443] env[62619]: _type = "Task" [ 1816.310443] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.323860] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778374, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.484292] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778373, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.500684] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d529eb9-d612-4503-8324-8472e16eea20 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.509154] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617a67e2-1313-491e-abff-a173783215c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.542907] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1e526d-c90f-459a-8bdd-b908be48e608 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.551913] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7f3529-606c-4f85-8795-821427c023bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.569111] env[62619]: DEBUG nova.compute.provider_tree [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1816.622665] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.820902] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778374, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069838} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.821602] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1816.822221] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e932621-e73b-49e0-b000-4d40c20de5f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.846835] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] d3aa352b-7d2d-416e-a579-9636619bb025/d3aa352b-7d2d-416e-a579-9636619bb025.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1816.847149] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b6726f9-bdc7-4e35-a6f4-399fdc6413d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.867444] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1816.867444] env[62619]: value = "task-1778375" [ 1816.867444] env[62619]: _type = "Task" [ 1816.867444] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.874888] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778375, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.987866] env[62619]: DEBUG oslo_vmware.api [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778373, 'name': PowerOnVM_Task, 'duration_secs': 0.522967} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.988199] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1816.988433] env[62619]: INFO nova.compute.manager [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Took 8.38 seconds to spawn the instance on the hypervisor. 
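Each "Waiting for the task ... progress is N% ... completed successfully" sequence above is oslo.vmware polling an asynchronous vSphere task to completion. A minimal sketch of that call pattern, with the endpoint, credentials and vm_ref as placeholders (the real values come from nova.conf and prior lookups, not from this snippet):

    from oslo_vmware import api as vmware_api

    # Placeholder endpoint and credentials for illustration only; the driver
    # reads the real values from nova.conf [vmware].
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def power_on(vm_ref):
        # Start the asynchronous vSphere task, then block on it; wait_for_task()
        # is the poller behind the "progress is N%" / "completed successfully"
        # records above.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)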
[ 1816.988652] env[62619]: DEBUG nova.compute.manager [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1816.989535] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca104b8-550a-4318-abf9-c28441605027 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.076335] env[62619]: DEBUG nova.scheduler.client.report [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1817.124134] env[62619]: DEBUG nova.compute.manager [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1817.165928] env[62619]: DEBUG nova.virt.hardware [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1817.166177] env[62619]: DEBUG nova.virt.hardware [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1817.166389] env[62619]: DEBUG nova.virt.hardware [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1817.166613] env[62619]: DEBUG nova.virt.hardware [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 
tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1817.166801] env[62619]: DEBUG nova.virt.hardware [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1817.166992] env[62619]: DEBUG nova.virt.hardware [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1817.167238] env[62619]: DEBUG nova.virt.hardware [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1817.167425] env[62619]: DEBUG nova.virt.hardware [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1817.167602] env[62619]: DEBUG nova.virt.hardware [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1817.167766] env[62619]: DEBUG nova.virt.hardware [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1817.167934] env[62619]: DEBUG nova.virt.hardware [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1817.168831] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfc9cfd-90d2-4d12-a101-4f19ad0561fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.181025] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafbe9db-5ab4-4170-a9d3-c716b0f8757b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.380728] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778375, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.510031] env[62619]: INFO nova.compute.manager [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Took 28.65 seconds to build instance. [ 1817.583663] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.495s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.584794] env[62619]: DEBUG nova.compute.manager [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1817.586996] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.570s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.587239] env[62619]: DEBUG nova.objects.instance [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lazy-loading 'resources' on Instance uuid 0987b6ec-2587-4f15-adbb-f563e19ecce9 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1817.886070] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778375, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.013091] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c5c46a1-00ee-45d6-868a-33eabc1bc73a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "a250f05d-cd74-436d-b656-2a9e55527809" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.162s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.014316] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "a250f05d-cd74-436d-b656-2a9e55527809" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.213s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.016091] env[62619]: INFO nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: a250f05d-cd74-436d-b656-2a9e55527809] During sync_power_state the instance has a pending task (spawning). Skip. 
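The "During sync_power_state the instance has a pending task (...). Skip." records reflect the guard in the periodic power-state sync: an instance whose task_state is still set (spawning, resize_reverting, ...) is left alone so the sync does not race the in-flight operation. A rough, simplified sketch of that guard, not the actual manager code:

    import logging

    LOG = logging.getLogger(__name__)

    def maybe_sync_power_state(db_instance, driver_power_state):
        # An in-flight operation owns the instance; defer to the next
        # periodic run instead of touching power state now.
        if db_instance.task_state is not None:
            LOG.info("During sync_power_state the instance has a pending "
                     "task (%s). Skip.", db_instance.task_state)
            return
        # Otherwise compare driver_power_state with db_instance.power_state
        # and reconcile the database (and, if needed, stop the instance).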
[ 1818.016169] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "a250f05d-cd74-436d-b656-2a9e55527809" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.002s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.019240] env[62619]: DEBUG nova.compute.manager [req-f08062bb-47d0-4eab-8460-39706b4a9180 req-ab448a00-b099-43bb-9eca-f2014f09463d service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Received event network-vif-plugged-0364c1be-595c-4984-9173-39fd5163c9ad {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1818.019448] env[62619]: DEBUG oslo_concurrency.lockutils [req-f08062bb-47d0-4eab-8460-39706b4a9180 req-ab448a00-b099-43bb-9eca-f2014f09463d service nova] Acquiring lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.019828] env[62619]: DEBUG oslo_concurrency.lockutils [req-f08062bb-47d0-4eab-8460-39706b4a9180 req-ab448a00-b099-43bb-9eca-f2014f09463d service nova] Lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.019828] env[62619]: DEBUG oslo_concurrency.lockutils [req-f08062bb-47d0-4eab-8460-39706b4a9180 req-ab448a00-b099-43bb-9eca-f2014f09463d service nova] Lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.019969] env[62619]: DEBUG nova.compute.manager [req-f08062bb-47d0-4eab-8460-39706b4a9180 req-ab448a00-b099-43bb-9eca-f2014f09463d service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] No waiting events found dispatching network-vif-plugged-0364c1be-595c-4984-9173-39fd5163c9ad {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1818.020139] env[62619]: WARNING nova.compute.manager [req-f08062bb-47d0-4eab-8460-39706b4a9180 req-ab448a00-b099-43bb-9eca-f2014f09463d service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Received unexpected event network-vif-plugged-0364c1be-595c-4984-9173-39fd5163c9ad for instance with vm_state building and task_state spawning. [ 1818.090430] env[62619]: DEBUG nova.compute.utils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1818.091832] env[62619]: DEBUG nova.compute.manager [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1818.095177] env[62619]: DEBUG nova.network.neutron [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1818.140080] env[62619]: DEBUG nova.network.neutron [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Successfully updated port: 0364c1be-595c-4984-9173-39fd5163c9ad {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1818.203117] env[62619]: DEBUG nova.policy [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e9094d6b3854c1184307d9bc35a966e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e11e1bca0c747fd8b4a0ca3e220ba4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1818.240550] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Acquiring lock "e1cd6059-ddb0-4f10-a569-e0bc71a63f4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.240784] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Lock "e1cd6059-ddb0-4f10-a569-e0bc71a63f4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.380200] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778375, 'name': ReconfigVM_Task, 'duration_secs': 1.312371} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.380493] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Reconfigured VM instance instance-00000063 to attach disk [datastore1] d3aa352b-7d2d-416e-a579-9636619bb025/d3aa352b-7d2d-416e-a579-9636619bb025.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1818.381178] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d30ea24c-9948-4e6e-b82c-f33e28c4e370 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.389827] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1818.389827] env[62619]: value = "task-1778376" [ 1818.389827] env[62619]: _type = "Task" [ 1818.389827] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.398458] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778376, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.403342] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3fce21-8d5b-4c36-ba1a-77f546056765 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.416347] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7bc24f-2d4b-4029-be9e-7f63a520c923 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.460062] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25eede26-f97f-44b7-95b5-13b4e34be8a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.470266] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c580df-912d-41b8-9f40-e884e22b8c48 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.493971] env[62619]: DEBUG nova.compute.provider_tree [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1818.543563] env[62619]: DEBUG nova.network.neutron [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Successfully created port: 963179af-510c-47e9-a81a-2f99f2055e2c {{(pid=62619) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 1818.602340] env[62619]: DEBUG nova.compute.manager [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1818.645449] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.645712] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.645881] env[62619]: DEBUG nova.network.neutron [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1818.743329] env[62619]: DEBUG nova.compute.manager [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1818.904931] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778376, 'name': Rename_Task, 'duration_secs': 0.137636} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.905242] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1818.905563] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31352867-f61b-444a-8f90-bac4315ba16d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.917747] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1818.917747] env[62619]: value = "task-1778377" [ 1818.917747] env[62619]: _type = "Task" [ 1818.917747] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.928770] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778377, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.002138] env[62619]: DEBUG nova.scheduler.client.report [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1819.191948] env[62619]: DEBUG nova.network.neutron [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1819.275743] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.398730] env[62619]: DEBUG nova.network.neutron [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Updating instance_info_cache with network_info: [{"id": "0364c1be-595c-4984-9173-39fd5163c9ad", "address": "fa:16:3e:d1:3c:33", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0364c1be-59", "ovs_interfaceid": "0364c1be-595c-4984-9173-39fd5163c9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1819.434107] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778377, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.510938] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.924s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.514437] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.567s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.519118] env[62619]: INFO nova.compute.claims [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1819.538793] env[62619]: INFO nova.scheduler.client.report [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Deleted allocations for instance 0987b6ec-2587-4f15-adbb-f563e19ecce9 [ 1819.614913] env[62619]: DEBUG nova.compute.manager [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1819.651993] env[62619]: DEBUG nova.virt.hardware [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1819.652261] env[62619]: DEBUG nova.virt.hardware [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1819.652417] env[62619]: DEBUG nova.virt.hardware [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1819.652597] env[62619]: DEBUG nova.virt.hardware [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1819.652770] env[62619]: DEBUG nova.virt.hardware [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1819.652927] env[62619]: DEBUG nova.virt.hardware [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1819.656784] env[62619]: DEBUG nova.virt.hardware [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1819.657014] env[62619]: DEBUG nova.virt.hardware [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1819.657206] env[62619]: DEBUG nova.virt.hardware [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Got 1 
possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1819.657329] env[62619]: DEBUG nova.virt.hardware [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1819.657542] env[62619]: DEBUG nova.virt.hardware [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1819.658453] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68639bb-d933-442a-8bda-9327f104d95c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.671479] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851723d3-12ae-4b2b-8f16-7b1fdbc9198f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.905545] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.905988] env[62619]: DEBUG nova.compute.manager [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Instance network_info: |[{"id": "0364c1be-595c-4984-9173-39fd5163c9ad", "address": "fa:16:3e:d1:3c:33", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0364c1be-59", "ovs_interfaceid": "0364c1be-595c-4984-9173-39fd5163c9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1819.906561] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:d1:3c:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9aa05ef8-c7bb-4af5-983f-bfa0f3f88223', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0364c1be-595c-4984-9173-39fd5163c9ad', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1819.914323] env[62619]: DEBUG oslo.service.loopingcall [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1819.914610] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1819.916685] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0cd70424-1a9b-4d20-9be9-7c8e4753731a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.948156] env[62619]: DEBUG oslo_vmware.api [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778377, 'name': PowerOnVM_Task, 'duration_secs': 0.621119} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.950177] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1819.951030] env[62619]: INFO nova.compute.manager [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Took 8.96 seconds to spawn the instance on the hypervisor. [ 1819.951030] env[62619]: DEBUG nova.compute.manager [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1819.951180] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1819.951180] env[62619]: value = "task-1778378" [ 1819.951180] env[62619]: _type = "Task" [ 1819.951180] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.952081] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43386bd-909c-4b58-acd7-86c24792722f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.968168] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778378, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.048640] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b34d575b-2fdf-42d8-8f9c-0191b6f215d1 tempest-ServerDiskConfigTestJSON-1493618009 tempest-ServerDiskConfigTestJSON-1493618009-project-member] Lock "0987b6ec-2587-4f15-adbb-f563e19ecce9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.487s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.049739] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "0987b6ec-2587-4f15-adbb-f563e19ecce9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.249s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.050527] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2fe53ebb-083f-4386-8a66-74cce18daf3c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.060868] env[62619]: DEBUG nova.compute.manager [req-182200ec-8c6c-4ecf-9e18-92b22b4527dc req-cfd0a6e4-f628-4c93-846c-e94da09d9f5e service nova] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Received event network-vif-plugged-963179af-510c-47e9-a81a-2f99f2055e2c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1820.061104] env[62619]: DEBUG oslo_concurrency.lockutils [req-182200ec-8c6c-4ecf-9e18-92b22b4527dc req-cfd0a6e4-f628-4c93-846c-e94da09d9f5e service nova] Acquiring lock "54da64a0-4acf-4025-9b51-7af61dbd55fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.061402] env[62619]: DEBUG oslo_concurrency.lockutils [req-182200ec-8c6c-4ecf-9e18-92b22b4527dc req-cfd0a6e4-f628-4c93-846c-e94da09d9f5e service nova] Lock "54da64a0-4acf-4025-9b51-7af61dbd55fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.061583] env[62619]: DEBUG oslo_concurrency.lockutils [req-182200ec-8c6c-4ecf-9e18-92b22b4527dc req-cfd0a6e4-f628-4c93-846c-e94da09d9f5e service nova] Lock "54da64a0-4acf-4025-9b51-7af61dbd55fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.061739] env[62619]: DEBUG nova.compute.manager [req-182200ec-8c6c-4ecf-9e18-92b22b4527dc req-cfd0a6e4-f628-4c93-846c-e94da09d9f5e service nova] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] No waiting events found dispatching network-vif-plugged-963179af-510c-47e9-a81a-2f99f2055e2c {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1820.061900] env[62619]: WARNING nova.compute.manager [req-182200ec-8c6c-4ecf-9e18-92b22b4527dc req-cfd0a6e4-f628-4c93-846c-e94da09d9f5e service nova] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Received unexpected event network-vif-plugged-963179af-510c-47e9-a81a-2f99f2055e2c for instance with vm_state building and task_state spawning. 
[ 1820.068750] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f076fe-b0d5-4491-9d13-e731e722756a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.085503] env[62619]: DEBUG nova.compute.manager [req-ca1c519b-b270-4581-b02f-f63c776d67c9 req-ebb6a411-c1c0-4916-b727-2c7dc7214487 service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Received event network-changed-0364c1be-595c-4984-9173-39fd5163c9ad {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1820.085503] env[62619]: DEBUG nova.compute.manager [req-ca1c519b-b270-4581-b02f-f63c776d67c9 req-ebb6a411-c1c0-4916-b727-2c7dc7214487 service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Refreshing instance network info cache due to event network-changed-0364c1be-595c-4984-9173-39fd5163c9ad. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1820.085503] env[62619]: DEBUG oslo_concurrency.lockutils [req-ca1c519b-b270-4581-b02f-f63c776d67c9 req-ebb6a411-c1c0-4916-b727-2c7dc7214487 service nova] Acquiring lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1820.085503] env[62619]: DEBUG oslo_concurrency.lockutils [req-ca1c519b-b270-4581-b02f-f63c776d67c9 req-ebb6a411-c1c0-4916-b727-2c7dc7214487 service nova] Acquired lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1820.085503] env[62619]: DEBUG nova.network.neutron [req-ca1c519b-b270-4581-b02f-f63c776d67c9 req-ebb6a411-c1c0-4916-b727-2c7dc7214487 service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Refreshing network info cache for port 0364c1be-595c-4984-9173-39fd5163c9ad {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1820.139000] env[62619]: DEBUG nova.network.neutron [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Successfully updated port: 963179af-510c-47e9-a81a-2f99f2055e2c {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1820.471751] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778378, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.486825] env[62619]: INFO nova.compute.manager [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Took 30.51 seconds to build instance. 
[ 1820.613329] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "0987b6ec-2587-4f15-adbb-f563e19ecce9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.563s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.642154] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "refresh_cache-54da64a0-4acf-4025-9b51-7af61dbd55fc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1820.642649] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "refresh_cache-54da64a0-4acf-4025-9b51-7af61dbd55fc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1820.642649] env[62619]: DEBUG nova.network.neutron [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1820.657981] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "4c66bbdf-af6a-4705-8219-85cf19f8314e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.658069] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.708661] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1820.709464] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1820.709745] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1820.839693] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465967a9-6abb-4fc8-ac46-8e629f56c931 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.849393] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827a86b8-63aa-4d97-9e00-20cb81c7fd22 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.884565] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1712e7f2-fb03-4d5e-85a4-f4342c6208f8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.893912] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461122e0-b9c9-4e40-942d-8d050f9baa6f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.910752] env[62619]: DEBUG nova.compute.provider_tree [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1820.965216] env[62619]: DEBUG nova.network.neutron [req-ca1c519b-b270-4581-b02f-f63c776d67c9 req-ebb6a411-c1c0-4916-b727-2c7dc7214487 service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Updated VIF entry in instance network info cache for port 0364c1be-595c-4984-9173-39fd5163c9ad. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1820.965848] env[62619]: DEBUG nova.network.neutron [req-ca1c519b-b270-4581-b02f-f63c776d67c9 req-ebb6a411-c1c0-4916-b727-2c7dc7214487 service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Updating instance_info_cache with network_info: [{"id": "0364c1be-595c-4984-9173-39fd5163c9ad", "address": "fa:16:3e:d1:3c:33", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0364c1be-59", "ovs_interfaceid": "0364c1be-595c-4984-9173-39fd5163c9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.974778] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778378, 'name': CreateVM_Task, 'duration_secs': 0.671382} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.975013] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1820.977045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1820.977045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1820.977045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1820.977045] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b2f8109-8b74-4070-95e4-16a573044d43 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.983155] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1820.983155] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525a79b8-59db-7d24-49e0-79bcf76810d0" [ 1820.983155] env[62619]: _type = "Task" [ 1820.983155] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.988736] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da6d5371-5f20-4a41-b3d9-bf0cc7799831 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "d3aa352b-7d2d-416e-a579-9636619bb025" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.028s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.989166] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "d3aa352b-7d2d-416e-a579-9636619bb025" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.188s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.989820] env[62619]: INFO nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1820.990457] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "d3aa352b-7d2d-416e-a579-9636619bb025" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.996938] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525a79b8-59db-7d24-49e0-79bcf76810d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.164584] env[62619]: DEBUG nova.compute.manager [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1821.210044] env[62619]: DEBUG nova.network.neutron [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1821.216419] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.415426] env[62619]: DEBUG nova.scheduler.client.report [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1821.470458] env[62619]: DEBUG oslo_concurrency.lockutils [req-ca1c519b-b270-4581-b02f-f63c776d67c9 req-ebb6a411-c1c0-4916-b727-2c7dc7214487 service nova] Releasing lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.471711] env[62619]: DEBUG nova.network.neutron [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Updating instance_info_cache with network_info: [{"id": "963179af-510c-47e9-a81a-2f99f2055e2c", "address": "fa:16:3e:3b:90:cd", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap963179af-51", "ovs_interfaceid": "963179af-510c-47e9-a81a-2f99f2055e2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1821.499011] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525a79b8-59db-7d24-49e0-79bcf76810d0, 'name': SearchDatastore_Task, 'duration_secs': 0.018633} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.499011] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.499011] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1821.499437] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.499652] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.499885] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1821.500211] env[62619]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.MakeDirectory with opID=oslo.vmware-437857f5-941f-471b-9920-96c8d0a444d7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.511036] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1821.511036] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1821.511548] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0217d17f-be25-42ae-b611-c1daeea93780 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.518830] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1821.518830] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52de4d4e-bcfb-9f54-5b11-2ba57e6e06b1" [ 1821.518830] env[62619]: _type = "Task" [ 1821.518830] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.525350] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52de4d4e-bcfb-9f54-5b11-2ba57e6e06b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.691165] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.920142] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.406s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.923527] env[62619]: DEBUG nova.compute.manager [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1821.933018] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.713s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.933018] env[62619]: INFO nova.compute.claims [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1821.979862] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "refresh_cache-54da64a0-4acf-4025-9b51-7af61dbd55fc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.980849] env[62619]: DEBUG nova.compute.manager [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Instance network_info: |[{"id": "963179af-510c-47e9-a81a-2f99f2055e2c", "address": "fa:16:3e:3b:90:cd", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap963179af-51", "ovs_interfaceid": "963179af-510c-47e9-a81a-2f99f2055e2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1821.982033] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:90:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '950a2f67-7668-4376-9d48-b38dca033c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '963179af-510c-47e9-a81a-2f99f2055e2c', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1821.989518] env[62619]: DEBUG oslo.service.loopingcall [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1821.990372] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1821.990669] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c67f110-c9c5-4ae7-88b2-210175347796 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.011602] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1822.011602] env[62619]: value = "task-1778379" [ 1822.011602] env[62619]: _type = "Task" [ 1822.011602] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.020342] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778379, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.028476] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52de4d4e-bcfb-9f54-5b11-2ba57e6e06b1, 'name': SearchDatastore_Task, 'duration_secs': 0.019005} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.029401] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ba01bf6-312f-45ca-95fa-2c26753a4834 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.035169] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1822.035169] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526b2a78-c485-9281-d983-194af2c3ced5" [ 1822.035169] env[62619]: _type = "Task" [ 1822.035169] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.044565] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526b2a78-c485-9281-d983-194af2c3ced5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.127592] env[62619]: DEBUG nova.compute.manager [req-f26f5536-b7d5-46ad-b14d-ba150c24f3ee req-3abf00ae-feef-4603-9311-da3a4e14eb07 service nova] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Received event network-changed-963179af-510c-47e9-a81a-2f99f2055e2c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1822.127703] env[62619]: DEBUG nova.compute.manager [req-f26f5536-b7d5-46ad-b14d-ba150c24f3ee req-3abf00ae-feef-4603-9311-da3a4e14eb07 service nova] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Refreshing instance network info cache due to event network-changed-963179af-510c-47e9-a81a-2f99f2055e2c. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1822.129729] env[62619]: DEBUG oslo_concurrency.lockutils [req-f26f5536-b7d5-46ad-b14d-ba150c24f3ee req-3abf00ae-feef-4603-9311-da3a4e14eb07 service nova] Acquiring lock "refresh_cache-54da64a0-4acf-4025-9b51-7af61dbd55fc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.129729] env[62619]: DEBUG oslo_concurrency.lockutils [req-f26f5536-b7d5-46ad-b14d-ba150c24f3ee req-3abf00ae-feef-4603-9311-da3a4e14eb07 service nova] Acquired lock "refresh_cache-54da64a0-4acf-4025-9b51-7af61dbd55fc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.129729] env[62619]: DEBUG nova.network.neutron [req-f26f5536-b7d5-46ad-b14d-ba150c24f3ee req-3abf00ae-feef-4603-9311-da3a4e14eb07 service nova] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Refreshing network info cache for port 963179af-510c-47e9-a81a-2f99f2055e2c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1822.439468] env[62619]: DEBUG nova.compute.utils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1822.444492] env[62619]: DEBUG nova.compute.manager [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1822.444710] env[62619]: DEBUG nova.network.neutron [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1822.493548] env[62619]: DEBUG nova.policy [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c91b100cc8f94b93af086dafebe29092', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c24c9d49d8d4104a0868f126eb3a26e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1822.521826] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778379, 'name': CreateVM_Task, 'duration_secs': 0.404755} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.522047] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1822.522706] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.522908] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.523329] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1822.523826] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa36ee94-4d2b-4023-bdf7-6b6731c02014 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.530212] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1822.530212] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5270a34c-3074-abe4-5399-28b17339b491" [ 1822.530212] env[62619]: _type = "Task" [ 1822.530212] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.538510] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5270a34c-3074-abe4-5399-28b17339b491, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.547714] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526b2a78-c485-9281-d983-194af2c3ced5, 'name': SearchDatastore_Task, 'duration_secs': 0.010512} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.547714] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.547714] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e/8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1822.547857] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-64a4de4c-79a9-4056-a249-1214bee6e083 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.554262] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1822.554262] env[62619]: value = "task-1778380" [ 1822.554262] env[62619]: _type = "Task" [ 1822.554262] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.561951] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778380, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.751097] env[62619]: DEBUG nova.network.neutron [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Successfully created port: d8de64b8-2687-42e0-91e6-97aa76f28d9f {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1822.945840] env[62619]: DEBUG nova.compute.manager [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1822.964753] env[62619]: DEBUG nova.network.neutron [req-f26f5536-b7d5-46ad-b14d-ba150c24f3ee req-3abf00ae-feef-4603-9311-da3a4e14eb07 service nova] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Updated VIF entry in instance network info cache for port 963179af-510c-47e9-a81a-2f99f2055e2c. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1822.966063] env[62619]: DEBUG nova.network.neutron [req-f26f5536-b7d5-46ad-b14d-ba150c24f3ee req-3abf00ae-feef-4603-9311-da3a4e14eb07 service nova] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Updating instance_info_cache with network_info: [{"id": "963179af-510c-47e9-a81a-2f99f2055e2c", "address": "fa:16:3e:3b:90:cd", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap963179af-51", "ovs_interfaceid": "963179af-510c-47e9-a81a-2f99f2055e2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.011927] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c5a84669-96dd-4bbd-97c3-4f42569ae6ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "cee0356c-25d7-48ca-be09-16b0e1b56a41" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.012074] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c5a84669-96dd-4bbd-97c3-4f42569ae6ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "cee0356c-25d7-48ca-be09-16b0e1b56a41" acquired by 
"nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.012273] env[62619]: DEBUG nova.compute.manager [None req-c5a84669-96dd-4bbd-97c3-4f42569ae6ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1823.013410] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c537fe-3239-47a3-bdaf-220b764f41a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.029353] env[62619]: DEBUG nova.compute.manager [None req-c5a84669-96dd-4bbd-97c3-4f42569ae6ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1823.030508] env[62619]: DEBUG nova.objects.instance [None req-c5a84669-96dd-4bbd-97c3-4f42569ae6ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lazy-loading 'flavor' on Instance uuid cee0356c-25d7-48ca-be09-16b0e1b56a41 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1823.044271] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5270a34c-3074-abe4-5399-28b17339b491, 'name': SearchDatastore_Task, 'duration_secs': 0.009459} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.044454] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.044681] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1823.045318] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.045318] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.045318] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1823.045581] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8587489c-a415-4c20-b742-b45e4806159e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.061295] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1823.061500] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1823.062671] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2b3e948-b599-4804-ad0c-4f69d912f65d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.068634] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778380, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.077280] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1823.077280] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524019f7-fdc8-b7aa-c588-d8eed5440bc4" [ 1823.077280] env[62619]: _type = "Task" [ 1823.077280] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.089731] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524019f7-fdc8-b7aa-c588-d8eed5440bc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.216769] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5047f787-bc1a-444a-8b9d-4767b599f779 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.226364] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f0922b-034f-407c-832d-4d3723eb7b28 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.255735] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade1339a-8710-410d-8ac4-8eab885dee7e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.263216] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc75cf6-e7b6-4720-9948-1d8db0c198da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.276232] env[62619]: DEBUG nova.compute.provider_tree [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1823.468550] env[62619]: DEBUG oslo_concurrency.lockutils [req-f26f5536-b7d5-46ad-b14d-ba150c24f3ee req-3abf00ae-feef-4603-9311-da3a4e14eb07 service nova] Releasing lock "refresh_cache-54da64a0-4acf-4025-9b51-7af61dbd55fc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.565869] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778380, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.586549] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524019f7-fdc8-b7aa-c588-d8eed5440bc4, 'name': SearchDatastore_Task, 'duration_secs': 0.028504} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.587304] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea502e14-12a8-4ede-b1bf-68948b59a9a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.592256] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1823.592256] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526bcfec-da8d-abfa-fc97-16ab42a98dfa" [ 1823.592256] env[62619]: _type = "Task" [ 1823.592256] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.599154] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526bcfec-da8d-abfa-fc97-16ab42a98dfa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.779804] env[62619]: DEBUG nova.scheduler.client.report [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1823.962936] env[62619]: DEBUG nova.compute.manager [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1823.991476] env[62619]: DEBUG nova.virt.hardware [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1823.991730] env[62619]: DEBUG nova.virt.hardware [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1823.991887] env[62619]: DEBUG nova.virt.hardware [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1823.992118] env[62619]: DEBUG nova.virt.hardware [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1823.992284] env[62619]: DEBUG nova.virt.hardware [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1823.992433] env[62619]: DEBUG nova.virt.hardware [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1823.992628] env[62619]: DEBUG nova.virt.hardware [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1823.992784] env[62619]: DEBUG nova.virt.hardware [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1823.992949] env[62619]: DEBUG 
nova.virt.hardware [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1823.993134] env[62619]: DEBUG nova.virt.hardware [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1823.993307] env[62619]: DEBUG nova.virt.hardware [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1823.994243] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc1a669-cd57-45a4-a364-ce98e524f2ca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.002342] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c7603c-50c8-48f2-8214-5f1a697ed128 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.041048] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5a84669-96dd-4bbd-97c3-4f42569ae6ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1824.041048] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f854bafd-e71d-4c1b-aff9-c303190845b9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.047881] env[62619]: DEBUG oslo_vmware.api [None req-c5a84669-96dd-4bbd-97c3-4f42569ae6ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1824.047881] env[62619]: value = "task-1778381" [ 1824.047881] env[62619]: _type = "Task" [ 1824.047881] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.056564] env[62619]: DEBUG oslo_vmware.api [None req-c5a84669-96dd-4bbd-97c3-4f42569ae6ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778381, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.065534] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778380, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.101997] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526bcfec-da8d-abfa-fc97-16ab42a98dfa, 'name': SearchDatastore_Task, 'duration_secs': 0.089933} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.102300] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.102558] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 54da64a0-4acf-4025-9b51-7af61dbd55fc/54da64a0-4acf-4025-9b51-7af61dbd55fc.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1824.102829] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-88f49544-b311-4303-bfcb-ac622eebde68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.110252] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1824.110252] env[62619]: value = "task-1778382" [ 1824.110252] env[62619]: _type = "Task" [ 1824.110252] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.118319] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778382, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.212993] env[62619]: DEBUG nova.compute.manager [req-c57cffab-84c8-41a7-8fb3-889db596f699 req-6413503e-1c66-475a-8f27-56667e162c91 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Received event network-vif-plugged-d8de64b8-2687-42e0-91e6-97aa76f28d9f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1824.213322] env[62619]: DEBUG oslo_concurrency.lockutils [req-c57cffab-84c8-41a7-8fb3-889db596f699 req-6413503e-1c66-475a-8f27-56667e162c91 service nova] Acquiring lock "70265068-1185-4f23-b0b4-ed2378c17a89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.213640] env[62619]: DEBUG oslo_concurrency.lockutils [req-c57cffab-84c8-41a7-8fb3-889db596f699 req-6413503e-1c66-475a-8f27-56667e162c91 service nova] Lock "70265068-1185-4f23-b0b4-ed2378c17a89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.213849] env[62619]: DEBUG oslo_concurrency.lockutils [req-c57cffab-84c8-41a7-8fb3-889db596f699 req-6413503e-1c66-475a-8f27-56667e162c91 service nova] Lock "70265068-1185-4f23-b0b4-ed2378c17a89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.213994] env[62619]: DEBUG nova.compute.manager [req-c57cffab-84c8-41a7-8fb3-889db596f699 req-6413503e-1c66-475a-8f27-56667e162c91 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] No waiting events found dispatching network-vif-plugged-d8de64b8-2687-42e0-91e6-97aa76f28d9f {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1824.214245] env[62619]: WARNING nova.compute.manager [req-c57cffab-84c8-41a7-8fb3-889db596f699 req-6413503e-1c66-475a-8f27-56667e162c91 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Received unexpected event network-vif-plugged-d8de64b8-2687-42e0-91e6-97aa76f28d9f for instance with vm_state building and task_state spawning. [ 1824.285096] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.354s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.285755] env[62619]: DEBUG nova.compute.manager [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1824.291922] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 12.360s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.316552] env[62619]: DEBUG nova.network.neutron [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Successfully updated port: d8de64b8-2687-42e0-91e6-97aa76f28d9f {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1824.561158] env[62619]: DEBUG oslo_vmware.api [None req-c5a84669-96dd-4bbd-97c3-4f42569ae6ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778381, 'name': PowerOffVM_Task, 'duration_secs': 0.375812} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.564545] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5a84669-96dd-4bbd-97c3-4f42569ae6ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1824.564786] env[62619]: DEBUG nova.compute.manager [None req-c5a84669-96dd-4bbd-97c3-4f42569ae6ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1824.565817] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe8d606-e7a4-4c1c-b464-c2d573bbdc50 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.578589] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778380, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.5769} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.579362] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e/8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1824.579657] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1824.579950] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-927b718a-0851-4dcb-b9cd-e8c39ccc65d8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.587622] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1824.587622] env[62619]: value = "task-1778383" [ 1824.587622] env[62619]: _type = "Task" [ 1824.587622] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.602018] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778383, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.621345] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778382, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.793724] env[62619]: DEBUG nova.compute.utils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1824.795304] env[62619]: DEBUG nova.objects.instance [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'migration_context' on Instance uuid 917960ca-3870-4e4e-aafe-3c6d77cf7c51 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1824.797287] env[62619]: DEBUG nova.compute.manager [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1824.797453] env[62619]: DEBUG nova.network.neutron [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1824.818699] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "refresh_cache-70265068-1185-4f23-b0b4-ed2378c17a89" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.819234] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "refresh_cache-70265068-1185-4f23-b0b4-ed2378c17a89" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.819442] env[62619]: DEBUG nova.network.neutron [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1825.050448] env[62619]: DEBUG nova.policy [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd106b23f779045f788b2536afd8c623d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2377a52a195d4f0b9181207ab5741734', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1825.082034] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c5a84669-96dd-4bbd-97c3-4f42569ae6ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "cee0356c-25d7-48ca-be09-16b0e1b56a41" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.070s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.099469] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778383, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113179} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.099469] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1825.100378] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690c749c-901c-4fdc-b6eb-2add9df45ef5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.124759] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e/8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1825.128269] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3ac1498-17c9-4e0b-98d5-fb0aeea4f302 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.149045] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778382, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581771} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.149556] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 54da64a0-4acf-4025-9b51-7af61dbd55fc/54da64a0-4acf-4025-9b51-7af61dbd55fc.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1825.150567] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1825.150567] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1825.150567] env[62619]: value = "task-1778384" [ 1825.150567] env[62619]: _type = "Task" [ 1825.150567] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.150567] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d94de2fb-77e1-4a46-8e2a-28854104b35b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.160653] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778384, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.161713] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1825.161713] env[62619]: value = "task-1778385" [ 1825.161713] env[62619]: _type = "Task" [ 1825.161713] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.169164] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778385, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.300791] env[62619]: DEBUG nova.compute.manager [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1825.305251] env[62619]: DEBUG nova.network.neutron [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Successfully created port: 4db4c00e-c5a4-4990-ab07-206b511515d8 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1825.374146] env[62619]: DEBUG nova.network.neutron [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1825.451845] env[62619]: DEBUG nova.objects.instance [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lazy-loading 'flavor' on Instance uuid cee0356c-25d7-48ca-be09-16b0e1b56a41 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1825.560853] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3681b2-8582-4df9-9ced-403af1893178 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.562791] env[62619]: DEBUG nova.network.neutron [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Updating instance_info_cache with network_info: [{"id": "d8de64b8-2687-42e0-91e6-97aa76f28d9f", "address": "fa:16:3e:73:56:6e", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8de64b8-26", "ovs_interfaceid": "d8de64b8-2687-42e0-91e6-97aa76f28d9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1825.568818] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9e51d2-c2b8-45d7-94c9-a71371bd01f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.602622] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528988c7-7b7a-4e86-84ff-acdedfad3625 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.610416] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92cbc8a9-347b-4c54-a0ac-11b86aa86633 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.624782] env[62619]: DEBUG nova.compute.provider_tree [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1825.662595] env[62619]: DEBUG oslo_vmware.api [None 
req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778384, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.670732] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778385, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06533} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.671026] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1825.671823] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc444c2-4904-4a2d-8418-ca0981e98114 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.694053] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 54da64a0-4acf-4025-9b51-7af61dbd55fc/54da64a0-4acf-4025-9b51-7af61dbd55fc.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1825.694053] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f24dfeb9-e27e-4a85-8312-1ded420287a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.713278] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1825.713278] env[62619]: value = "task-1778386" [ 1825.713278] env[62619]: _type = "Task" [ 1825.713278] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.721089] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778386, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.957777] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "refresh_cache-cee0356c-25d7-48ca-be09-16b0e1b56a41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.957910] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquired lock "refresh_cache-cee0356c-25d7-48ca-be09-16b0e1b56a41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.958070] env[62619]: DEBUG nova.network.neutron [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1825.958186] env[62619]: DEBUG nova.objects.instance [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lazy-loading 'info_cache' on Instance uuid cee0356c-25d7-48ca-be09-16b0e1b56a41 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1826.065166] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "refresh_cache-70265068-1185-4f23-b0b4-ed2378c17a89" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.065538] env[62619]: DEBUG nova.compute.manager [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Instance network_info: |[{"id": "d8de64b8-2687-42e0-91e6-97aa76f28d9f", "address": "fa:16:3e:73:56:6e", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8de64b8-26", "ovs_interfaceid": "d8de64b8-2687-42e0-91e6-97aa76f28d9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1826.066053] 
env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:56:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8ee8640-3787-4c27-9581-962ddb2be7e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd8de64b8-2687-42e0-91e6-97aa76f28d9f', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1826.076532] env[62619]: DEBUG oslo.service.loopingcall [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1826.076794] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1826.077045] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d059c5b-c46b-4fb1-a909-93545b790b78 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.096835] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1826.096835] env[62619]: value = "task-1778387" [ 1826.096835] env[62619]: _type = "Task" [ 1826.096835] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.108189] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778387, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.128517] env[62619]: DEBUG nova.scheduler.client.report [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1826.163493] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778384, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.223228] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778386, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.264308] env[62619]: DEBUG nova.compute.manager [req-82ef0379-c3e8-462f-b782-5df65bf298b3 req-33d7653c-476b-42bc-a713-6070a417fb23 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Received event network-changed-d8de64b8-2687-42e0-91e6-97aa76f28d9f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1826.264543] env[62619]: DEBUG nova.compute.manager [req-82ef0379-c3e8-462f-b782-5df65bf298b3 req-33d7653c-476b-42bc-a713-6070a417fb23 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Refreshing instance network info cache due to event network-changed-d8de64b8-2687-42e0-91e6-97aa76f28d9f. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1826.264935] env[62619]: DEBUG oslo_concurrency.lockutils [req-82ef0379-c3e8-462f-b782-5df65bf298b3 req-33d7653c-476b-42bc-a713-6070a417fb23 service nova] Acquiring lock "refresh_cache-70265068-1185-4f23-b0b4-ed2378c17a89" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.265178] env[62619]: DEBUG oslo_concurrency.lockutils [req-82ef0379-c3e8-462f-b782-5df65bf298b3 req-33d7653c-476b-42bc-a713-6070a417fb23 service nova] Acquired lock "refresh_cache-70265068-1185-4f23-b0b4-ed2378c17a89" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.265459] env[62619]: DEBUG nova.network.neutron [req-82ef0379-c3e8-462f-b782-5df65bf298b3 req-33d7653c-476b-42bc-a713-6070a417fb23 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Refreshing network info cache for port d8de64b8-2687-42e0-91e6-97aa76f28d9f {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1826.313802] env[62619]: DEBUG nova.compute.manager [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1826.345561] env[62619]: DEBUG nova.virt.hardware [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1826.345862] env[62619]: DEBUG nova.virt.hardware [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1826.346053] env[62619]: DEBUG nova.virt.hardware [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1826.346251] env[62619]: DEBUG nova.virt.hardware [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1826.346410] env[62619]: DEBUG nova.virt.hardware [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1826.346632] env[62619]: DEBUG nova.virt.hardware [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1826.346925] env[62619]: DEBUG nova.virt.hardware [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1826.347116] env[62619]: DEBUG nova.virt.hardware [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1826.347294] env[62619]: DEBUG nova.virt.hardware [None 
req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1826.347457] env[62619]: DEBUG nova.virt.hardware [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1826.347661] env[62619]: DEBUG nova.virt.hardware [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1826.349884] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8924ba-9654-4107-8f75-a927f3b3bf1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.357967] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a4b76f-f300-4d57-ac87-7c563fd25db1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.462617] env[62619]: DEBUG nova.objects.base [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1826.607316] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778387, 'name': CreateVM_Task, 'duration_secs': 0.372348} completed successfully. 
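The nova.virt.hardware block above (flavor and image limits 0:0:0, "Build topologies for 1 vcpu(s) 1:1:1", one possible topology) is the driver choosing a sockets/cores/threads layout for the guest. The toy enumeration below reproduces the idea for the 1-vCPU m1.nano case; it is a simplified illustration, not nova's actual implementation.

    import itertools
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


    def possible_topologies(vcpus, maximum):
        # Enumerate every sockets*cores*threads split of `vcpus` that respects
        # the per-dimension maxima -- the idea behind "Got N possible topologies".
        for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
            if (s * c * t == vcpus and s <= maximum.sockets
                    and c <= maximum.cores and t <= maximum.threads):
                yield VirtCPUTopology(s, c, t)


    # For the 1-vCPU flavor above only (1, 1, 1) survives, matching
    # "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    limits = VirtCPUTopology(sockets=65536, cores=65536, threads=65536)
    print(list(possible_topologies(1, limits)))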
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.607549] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1826.608307] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.608471] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.608941] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1826.609087] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0921a2e1-8e3c-41cc-9ef1-da46cf011dfd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.613769] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1826.613769] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5240998a-62ad-e2b4-e072-152df0127f7d" [ 1826.613769] env[62619]: _type = "Task" [ 1826.613769] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.622073] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5240998a-62ad-e2b4-e072-152df0127f7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.662401] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778384, 'name': ReconfigVM_Task, 'duration_secs': 1.151483} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.662616] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e/8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1826.663353] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f106411-2152-4436-8076-f207ef98663a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.669710] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1826.669710] env[62619]: value = "task-1778388" [ 1826.669710] env[62619]: _type = "Task" [ 1826.669710] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.677802] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778388, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.725684] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778386, 'name': ReconfigVM_Task, 'duration_secs': 0.714629} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.725972] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 54da64a0-4acf-4025-9b51-7af61dbd55fc/54da64a0-4acf-4025-9b51-7af61dbd55fc.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1826.726605] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa16acca-620b-48e6-9114-829605cfabee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.732761] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1826.732761] env[62619]: value = "task-1778389" [ 1826.732761] env[62619]: _type = "Task" [ 1826.732761] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.741346] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778389, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.799363] env[62619]: DEBUG nova.network.neutron [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Successfully updated port: 4db4c00e-c5a4-4990-ab07-206b511515d8 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1826.980056] env[62619]: DEBUG nova.network.neutron [req-82ef0379-c3e8-462f-b782-5df65bf298b3 req-33d7653c-476b-42bc-a713-6070a417fb23 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Updated VIF entry in instance network info cache for port d8de64b8-2687-42e0-91e6-97aa76f28d9f. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1826.980467] env[62619]: DEBUG nova.network.neutron [req-82ef0379-c3e8-462f-b782-5df65bf298b3 req-33d7653c-476b-42bc-a713-6070a417fb23 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Updating instance_info_cache with network_info: [{"id": "d8de64b8-2687-42e0-91e6-97aa76f28d9f", "address": "fa:16:3e:73:56:6e", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8de64b8-26", "ovs_interfaceid": "d8de64b8-2687-42e0-91e6-97aa76f28d9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.125111] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5240998a-62ad-e2b4-e072-152df0127f7d, 'name': SearchDatastore_Task, 'duration_secs': 0.011384} completed successfully. 
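The instance_info_cache blob above is nova's serialized network_info model: a list of VIFs, each carrying a network with subnets and fixed IPs. A short sketch of pulling the fixed addresses out of such a structure; the dict literal is abbreviated from the cache entry logged above.

    # Abbreviated from the instance_info_cache entry logged above.
    network_info = [{
        'id': 'd8de64b8-2687-42e0-91e6-97aa76f28d9f',
        'address': 'fa:16:3e:73:56:6e',
        'network': {
            'bridge': 'br-int',
            'subnets': [{
                'cidr': '192.168.128.0/28',
                'ips': [{'address': '192.168.128.7', 'type': 'fixed'}],
            }],
        },
    }]


    def fixed_ips(vifs):
        # Walk vif -> network -> subnets -> ips, keeping only fixed addresses.
        for vif in vifs:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    if ip['type'] == 'fixed':
                        yield vif['id'], ip['address']


    print(list(fixed_ips(network_info)))
    # [('d8de64b8-2687-42e0-91e6-97aa76f28d9f', '192.168.128.7')]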
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.125484] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.125724] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1827.125957] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.126118] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.126293] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1827.126580] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f66801ae-8d3e-4c5e-b811-c9877e2b0196 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.139897] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.851s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.147618] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.525s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.148603] env[62619]: DEBUG nova.objects.instance [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Lazy-loading 'resources' on Instance uuid 
e19650da-cc3d-4350-be3e-dc776ce68206 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1827.149012] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1827.149209] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1827.151982] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a04fb8a3-70b8-43c4-8015-f1915f9d1d29 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.162143] env[62619]: DEBUG nova.network.neutron [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Updating instance_info_cache with network_info: [{"id": "105882b4-fd3c-4267-bcf1-662dacfc582f", "address": "fa:16:3e:e8:02:49", "network": {"id": "91d6666a-8674-4957-8cbe-8e730e5c0741", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1893713214-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d9cfa8d5b184320ae434919598191ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap105882b4-fd", "ovs_interfaceid": "105882b4-fd3c-4267-bcf1-662dacfc582f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.165662] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1827.165662] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5202dc87-695d-0646-aeb3-8964b4bfc4aa" [ 1827.165662] env[62619]: _type = "Task" [ 1827.165662] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.181289] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5202dc87-695d-0646-aeb3-8964b4bfc4aa, 'name': SearchDatastore_Task, 'duration_secs': 0.009605} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.185187] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778388, 'name': Rename_Task, 'duration_secs': 0.153606} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.185391] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de56eb24-8f1f-42a0-a360-fac308af2782 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.187540] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1827.187759] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00a7bca2-0450-4930-ad67-c52f62b472d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.191851] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1827.191851] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e9cae0-3e87-364a-ec98-a1b2e19325f8" [ 1827.191851] env[62619]: _type = "Task" [ 1827.191851] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.195851] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1827.195851] env[62619]: value = "task-1778390" [ 1827.195851] env[62619]: _type = "Task" [ 1827.195851] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.201860] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e9cae0-3e87-364a-ec98-a1b2e19325f8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.206308] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778390, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.241940] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778389, 'name': Rename_Task, 'duration_secs': 0.209594} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.242229] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1827.242478] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ae9ad1b-4f54-4403-a907-b242daba8bdb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.248823] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1827.248823] env[62619]: value = "task-1778391" [ 1827.248823] env[62619]: _type = "Task" [ 1827.248823] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.256501] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778391, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.305295] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "refresh_cache-1d40d434-fa8f-463e-908a-24c61538fe33" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.305434] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "refresh_cache-1d40d434-fa8f-463e-908a-24c61538fe33" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.305626] env[62619]: DEBUG nova.network.neutron [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1827.482801] env[62619]: DEBUG oslo_concurrency.lockutils [req-82ef0379-c3e8-462f-b782-5df65bf298b3 req-33d7653c-476b-42bc-a713-6070a417fb23 service nova] Releasing lock "refresh_cache-70265068-1185-4f23-b0b4-ed2378c17a89" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.665280] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Releasing lock "refresh_cache-cee0356c-25d7-48ca-be09-16b0e1b56a41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.705691] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e9cae0-3e87-364a-ec98-a1b2e19325f8, 'name': SearchDatastore_Task, 'duration_secs': 0.012343} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.706462] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.706785] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 70265068-1185-4f23-b0b4-ed2378c17a89/70265068-1185-4f23-b0b4-ed2378c17a89.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1827.707066] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ee43ae9-98e7-45ed-bbf1-9d74a48d5bdc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.714411] env[62619]: DEBUG oslo_vmware.api [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778390, 'name': PowerOnVM_Task, 'duration_secs': 0.499469} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.715224] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1827.715484] env[62619]: INFO nova.compute.manager [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Took 10.59 seconds to spawn the instance on the hypervisor. [ 1827.715705] env[62619]: DEBUG nova.compute.manager [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1827.716481] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a47ff9c6-cd23-4c0e-b2d4-c04565d95a40 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.720327] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1827.720327] env[62619]: value = "task-1778392" [ 1827.720327] env[62619]: _type = "Task" [ 1827.720327] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.734540] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778392, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.764521] env[62619]: DEBUG oslo_vmware.api [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778391, 'name': PowerOnVM_Task, 'duration_secs': 0.443608} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.764896] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1827.765192] env[62619]: INFO nova.compute.manager [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Took 8.15 seconds to spawn the instance on the hypervisor. [ 1827.765452] env[62619]: DEBUG nova.compute.manager [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1827.767385] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744b1e84-371e-4222-92ae-fe214649a990 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.843036] env[62619]: DEBUG nova.network.neutron [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1827.920173] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0799a86f-e909-4db6-b4f3-c76bea5dfa7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.927640] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861047ee-e887-4477-b2df-eca1e6357383 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.964830] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2798ab31-f543-4657-9483-e997b91f1efc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.974298] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f64bea-d660-4922-9cfd-a493e3cd8b5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.991505] env[62619]: DEBUG nova.compute.provider_tree [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1828.013257] env[62619]: DEBUG nova.network.neutron [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Updating instance_info_cache with network_info: [{"id": "4db4c00e-c5a4-4990-ab07-206b511515d8", "address": "fa:16:3e:f3:05:14", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4db4c00e-c5", "ovs_interfaceid": "4db4c00e-c5a4-4990-ab07-206b511515d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1828.236916] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778392, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.238316] env[62619]: INFO nova.compute.manager [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Took 33.39 seconds to build instance. [ 1828.293244] env[62619]: DEBUG nova.compute.manager [req-b3741122-af60-4bcf-a382-ecd8b6095ef8 req-e2f2560d-5742-4fd4-b759-6d872e2b0577 service nova] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Received event network-vif-plugged-4db4c00e-c5a4-4990-ab07-206b511515d8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1828.293369] env[62619]: DEBUG oslo_concurrency.lockutils [req-b3741122-af60-4bcf-a382-ecd8b6095ef8 req-e2f2560d-5742-4fd4-b759-6d872e2b0577 service nova] Acquiring lock "1d40d434-fa8f-463e-908a-24c61538fe33-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.293473] env[62619]: DEBUG oslo_concurrency.lockutils [req-b3741122-af60-4bcf-a382-ecd8b6095ef8 req-e2f2560d-5742-4fd4-b759-6d872e2b0577 service nova] Lock "1d40d434-fa8f-463e-908a-24c61538fe33-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.293655] env[62619]: DEBUG oslo_concurrency.lockutils [req-b3741122-af60-4bcf-a382-ecd8b6095ef8 req-e2f2560d-5742-4fd4-b759-6d872e2b0577 service nova] Lock "1d40d434-fa8f-463e-908a-24c61538fe33-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.293795] env[62619]: DEBUG nova.compute.manager [req-b3741122-af60-4bcf-a382-ecd8b6095ef8 req-e2f2560d-5742-4fd4-b759-6d872e2b0577 service nova] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] No waiting events found dispatching network-vif-plugged-4db4c00e-c5a4-4990-ab07-206b511515d8 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1828.293962] env[62619]: WARNING nova.compute.manager [req-b3741122-af60-4bcf-a382-ecd8b6095ef8 req-e2f2560d-5742-4fd4-b759-6d872e2b0577 service nova] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Received unexpected event network-vif-plugged-4db4c00e-c5a4-4990-ab07-206b511515d8 for instance with vm_state building and task_state spawning. [ 1828.294147] env[62619]: DEBUG nova.compute.manager [req-b3741122-af60-4bcf-a382-ecd8b6095ef8 req-e2f2560d-5742-4fd4-b759-6d872e2b0577 service nova] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Received event network-changed-4db4c00e-c5a4-4990-ab07-206b511515d8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1828.294306] env[62619]: DEBUG nova.compute.manager [req-b3741122-af60-4bcf-a382-ecd8b6095ef8 req-e2f2560d-5742-4fd4-b759-6d872e2b0577 service nova] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Refreshing instance network info cache due to event network-changed-4db4c00e-c5a4-4990-ab07-206b511515d8. 
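The network-vif-plugged / network-changed entries above are Neutron notifying nova-compute, through the external instance event API, that the port has been wired up; the WARNING only means the spawn path was not currently blocked waiting for that event. Conceptually the mechanism is an event registry that a waiter blocks on, roughly as sketched below with plain threading primitives; this is an illustration, not nova's actual event plumbing.

    import threading

    # Events a spawn has announced it will wait for, keyed by event name.
    _waiting = {}


    def prepare_for_event(name):
        _waiting[name] = threading.Event()


    def external_instance_event(name):
        ev = _waiting.get(name)
        if ev is None:
            # Mirrors "No waiting events found dispatching ..." and the
            # "Received unexpected event ..." WARNING: nobody is blocked on it.
            print('unexpected event:', name)
        else:
            ev.set()


    prepare_for_event('network-vif-plugged-PORT')
    external_instance_event('network-vif-plugged-PORT')   # releases the waiter
    external_instance_event('network-changed-PORT')       # nobody waiting
    assert _waiting['network-vif-plugged-PORT'].wait(timeout=1)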
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1828.294471] env[62619]: DEBUG oslo_concurrency.lockutils [req-b3741122-af60-4bcf-a382-ecd8b6095ef8 req-e2f2560d-5742-4fd4-b759-6d872e2b0577 service nova] Acquiring lock "refresh_cache-1d40d434-fa8f-463e-908a-24c61538fe33" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.295147] env[62619]: INFO nova.compute.manager [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Took 31.97 seconds to build instance. [ 1828.495242] env[62619]: DEBUG nova.scheduler.client.report [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1828.517110] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "refresh_cache-1d40d434-fa8f-463e-908a-24c61538fe33" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1828.517337] env[62619]: DEBUG nova.compute.manager [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Instance network_info: |[{"id": "4db4c00e-c5a4-4990-ab07-206b511515d8", "address": "fa:16:3e:f3:05:14", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4db4c00e-c5", "ovs_interfaceid": "4db4c00e-c5a4-4990-ab07-206b511515d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1828.517679] env[62619]: DEBUG oslo_concurrency.lockutils [req-b3741122-af60-4bcf-a382-ecd8b6095ef8 req-e2f2560d-5742-4fd4-b759-6d872e2b0577 service nova] Acquired lock 
"refresh_cache-1d40d434-fa8f-463e-908a-24c61538fe33" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.517871] env[62619]: DEBUG nova.network.neutron [req-b3741122-af60-4bcf-a382-ecd8b6095ef8 req-e2f2560d-5742-4fd4-b759-6d872e2b0577 service nova] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Refreshing network info cache for port 4db4c00e-c5a4-4990-ab07-206b511515d8 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1828.522380] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:05:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4db4c00e-c5a4-4990-ab07-206b511515d8', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1828.532808] env[62619]: DEBUG oslo.service.loopingcall [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1828.532808] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1828.532808] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51779169-c97e-4abc-81fc-851630383de6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.552887] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1828.552887] env[62619]: value = "task-1778393" [ 1828.552887] env[62619]: _type = "Task" [ 1828.552887] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.562055] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778393, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.676139] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1828.679830] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f110f2d-13a8-4791-bdad-09adec8d3fba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.688448] env[62619]: DEBUG oslo_vmware.api [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1828.688448] env[62619]: value = "task-1778394" [ 1828.688448] env[62619]: _type = "Task" [ 1828.688448] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.697687] env[62619]: DEBUG oslo_vmware.api [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778394, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.702414] env[62619]: INFO nova.compute.manager [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Swapping old allocation on dict_keys(['e814b747-ed75-487b-a97d-acf66bc6db0b']) held by migration 66600ae4-092e-4962-9c0b-51362373240f for instance [ 1828.731068] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778392, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.587302} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.732127] env[62619]: DEBUG nova.scheduler.client.report [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Overwriting current allocation {'allocations': {'e814b747-ed75-487b-a97d-acf66bc6db0b': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 151}}, 'project_id': 'c82fb42e93ff479b971f49eb92f50832', 'user_id': 'fe6ee1c8d8ef4b718274da7be4f5fd01', 'consumer_generation': 1} on consumer 917960ca-3870-4e4e-aafe-3c6d77cf7c51 {{(pid=62619) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1828.734150] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 70265068-1185-4f23-b0b4-ed2378c17a89/70265068-1185-4f23-b0b4-ed2378c17a89.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1828.734439] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1828.734984] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee0564ce-6d0a-4ecb-b81d-5c2884ed30ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.742260] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8b2ea95-8327-4965-9df0-fc8a23afeed8 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.921s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.742260] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1828.742260] env[62619]: value = "task-1778395" [ 1828.742260] env[62619]: _type = "Task" [ 1828.742260] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.742260] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 13.940s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.742585] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31c880a-9a3c-4e7b-8d7b-49ecb0c3e528 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.753854] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778395, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.797605] env[62619]: DEBUG oslo_concurrency.lockutils [None req-538673a9-77bb-45bf-a82c-d3cd3a5ebb58 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "54da64a0-4acf-4025-9b51-7af61dbd55fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.481s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.837589] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.837742] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.837914] env[62619]: DEBUG nova.network.neutron [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1829.002730] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.855s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.005721] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.730s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.010028] env[62619]: INFO nova.compute.claims [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1829.027177] env[62619]: INFO nova.scheduler.client.report [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Deleted allocations for instance e19650da-cc3d-4350-be3e-dc776ce68206 [ 1829.067139] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778393, 'name': CreateVM_Task, 'duration_secs': 0.380885} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.067267] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1829.068401] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.069097] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.069097] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1829.069480] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1152ab6b-332b-4e0b-a40e-02b2f7d41c6f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.075327] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1829.075327] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bad7c1-77f7-84be-78e8-f6b849ab66ad" [ 1829.075327] env[62619]: _type = "Task" [ 1829.075327] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.093933] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bad7c1-77f7-84be-78e8-f6b849ab66ad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.198743] env[62619]: DEBUG oslo_vmware.api [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778394, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.253548] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778395, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090492} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.253856] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1829.254653] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de9321f-d5b2-4a44-9122-1ce5a218d4cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.270142] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.528s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.279061] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 70265068-1185-4f23-b0b4-ed2378c17a89/70265068-1185-4f23-b0b4-ed2378c17a89.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1829.279682] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec612282-b83a-47bb-ae2b-f2a2ba9d575a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.294895] env[62619]: DEBUG nova.network.neutron [req-b3741122-af60-4bcf-a382-ecd8b6095ef8 req-e2f2560d-5742-4fd4-b759-6d872e2b0577 service nova] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Updated VIF entry in instance network info cache for port 4db4c00e-c5a4-4990-ab07-206b511515d8. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1829.295261] env[62619]: DEBUG nova.network.neutron [req-b3741122-af60-4bcf-a382-ecd8b6095ef8 req-e2f2560d-5742-4fd4-b759-6d872e2b0577 service nova] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Updating instance_info_cache with network_info: [{"id": "4db4c00e-c5a4-4990-ab07-206b511515d8", "address": "fa:16:3e:f3:05:14", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4db4c00e-c5", "ovs_interfaceid": "4db4c00e-c5a4-4990-ab07-206b511515d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.301328] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1829.301328] env[62619]: value = "task-1778396" [ 1829.301328] env[62619]: _type = "Task" [ 1829.301328] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.310071] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778396, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.536028] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2fee668-59d0-433e-972e-280cfe8e4252 tempest-ImagesOneServerTestJSON-617082992 tempest-ImagesOneServerTestJSON-617082992-project-member] Lock "e19650da-cc3d-4350-be3e-dc776ce68206" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.709s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.536368] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "e19650da-cc3d-4350-be3e-dc776ce68206" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 14.736s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.536606] env[62619]: INFO nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] During sync_power_state the instance has a pending task (deleting). Skip. 
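
The instance_info_cache update above carries the full per-port VIF payload that Nova caches for an instance (MAC address, subnets with fixed and floating IPs, OVS binding details). As an illustrative aside, here is a minimal stdlib Python sketch of walking that structure; the sample values are copied from the cached entry for port 4db4c00e-c5a4-4990-ab07-206b511515d8 logged above, trimmed to the fields the example needs. This is not Nova's own network model code, just a reader's sketch of the cached shape.

# Sketch: summarizing one VIF entry from an instance_info_cache payload.
# Field names and sample values are taken from the cache update logged
# above; fields not needed here (details, meta, profile, ...) are omitted.
network_info = [{
    "id": "4db4c00e-c5a4-4990-ab07-206b511515d8",
    "address": "fa:16:3e:f3:05:14",
    "network": {
        "id": "8719791a-28fb-4108-b120-fcdc51c572ea",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "version": 4},
            "ips": [{"address": "192.168.128.9", "type": "fixed",
                     "version": 4, "floating_ips": []}],
        }],
    },
    "type": "ovs",
    "devname": "tap4db4c00e-c5",
    "active": True,
}]

def summarize_vif(vif):
    """Return (devname, fixed IPs, floating IPs) for one cached VIF dict."""
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed.append(ip["address"])
            floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return vif["devname"], fixed, floating

for vif in network_info:
    print(summarize_vif(vif))
    # ('tap4db4c00e-c5', ['192.168.128.9'], [])
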
[ 1829.536847] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "e19650da-cc3d-4350-be3e-dc776ce68206" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.559921] env[62619]: DEBUG nova.network.neutron [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance_info_cache with network_info: [{"id": "5911524f-a8b5-4591-a312-ea0cefac24df", "address": "fa:16:3e:bd:c4:f9", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5911524f-a8", "ovs_interfaceid": "5911524f-a8b5-4591-a312-ea0cefac24df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.587752] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52bad7c1-77f7-84be-78e8-f6b849ab66ad, 'name': SearchDatastore_Task, 'duration_secs': 0.027194} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.587752] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1829.587752] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1829.587752] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.587752] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.588092] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1829.588092] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fdc20617-385b-4255-90f4-f04026b2bc9a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.598492] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1829.598679] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1829.599421] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec39ab17-59ce-47af-af02-6a5a42ac6ace {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.604806] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1829.604806] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52735488-0b59-e25f-8b50-1767e6a10ed3" [ 1829.604806] env[62619]: _type = "Task" [ 1829.604806] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.614139] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52735488-0b59-e25f-8b50-1767e6a10ed3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.627859] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f86a74b-85e8-4721-86f8-a55b55aefff8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "54da64a0-4acf-4025-9b51-7af61dbd55fc" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.628155] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f86a74b-85e8-4721-86f8-a55b55aefff8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "54da64a0-4acf-4025-9b51-7af61dbd55fc" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.628385] env[62619]: DEBUG nova.compute.manager [None req-9f86a74b-85e8-4721-86f8-a55b55aefff8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1829.629391] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851145cc-5bb8-4a94-83fa-074112b8539e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.635738] env[62619]: DEBUG nova.compute.manager [None req-9f86a74b-85e8-4721-86f8-a55b55aefff8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1829.636474] env[62619]: DEBUG nova.objects.instance [None req-9f86a74b-85e8-4721-86f8-a55b55aefff8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lazy-loading 'flavor' on Instance uuid 54da64a0-4acf-4025-9b51-7af61dbd55fc {{(pid=62619) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1829.699128] env[62619]: DEBUG oslo_vmware.api [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778394, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.798304] env[62619]: DEBUG oslo_concurrency.lockutils [req-b3741122-af60-4bcf-a382-ecd8b6095ef8 req-e2f2560d-5742-4fd4-b759-6d872e2b0577 service nova] Releasing lock "refresh_cache-1d40d434-fa8f-463e-908a-24c61538fe33" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1829.810982] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778396, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.063751] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "refresh_cache-917960ca-3870-4e4e-aafe-3c6d77cf7c51" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.064927] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1021780-170f-4c21-bab5-71d4463e91ce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.072580] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad7efc5-4273-411a-89b9-8b49325a37ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.101883] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "52b29fef-eab6-4541-a570-af9c0c021a75" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.101883] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "52b29fef-eab6-4541-a570-af9c0c021a75" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.115914] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52735488-0b59-e25f-8b50-1767e6a10ed3, 'name': SearchDatastore_Task, 'duration_secs': 0.022672} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.118741] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff1a7554-b728-4252-8e17-3c68969e81d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.124813] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1830.124813] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e4e209-589b-5bfe-ff7f-00ef3331450e" [ 1830.124813] env[62619]: _type = "Task" [ 1830.124813] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.132834] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e4e209-589b-5bfe-ff7f-00ef3331450e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.202146] env[62619]: DEBUG oslo_vmware.api [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778394, 'name': PowerOnVM_Task, 'duration_secs': 1.105509} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.204888] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1830.205159] env[62619]: DEBUG nova.compute.manager [None req-4c502f71-02ff-4688-b8af-d0a68f4ae1ba tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1830.206394] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269d8bab-0da4-494e-ad14-d5a7cc44e83e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.254263] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0deff7db-fc27-42f3-bf67-06b46e747e98 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.263323] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c258fd1-af8a-4a16-a924-eb06f0a905d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.299308] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7ab4fb-d736-41ea-83f2-6fc0ea7df4e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1830.314875] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e91a286-280e-4174-93b2-b298b56c511b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.317739] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778396, 'name': ReconfigVM_Task, 'duration_secs': 0.668617} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.317892] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 70265068-1185-4f23-b0b4-ed2378c17a89/70265068-1185-4f23-b0b4-ed2378c17a89.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1830.318935] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-764a4093-d985-4a40-a233-aa389e6aa63b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.329224] env[62619]: DEBUG nova.compute.provider_tree [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1830.336233] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1830.336233] env[62619]: value = "task-1778397" [ 1830.336233] env[62619]: _type = "Task" [ 1830.336233] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.344652] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778397, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.495283] env[62619]: DEBUG nova.compute.manager [req-c834d6b1-eca4-4857-8e9c-d4b76865b86a req-02a780bd-c790-4d53-bef3-6370534e99bc service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Received event network-changed-0364c1be-595c-4984-9173-39fd5163c9ad {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1830.495283] env[62619]: DEBUG nova.compute.manager [req-c834d6b1-eca4-4857-8e9c-d4b76865b86a req-02a780bd-c790-4d53-bef3-6370534e99bc service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Refreshing instance network info cache due to event network-changed-0364c1be-595c-4984-9173-39fd5163c9ad. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1830.495283] env[62619]: DEBUG oslo_concurrency.lockutils [req-c834d6b1-eca4-4857-8e9c-d4b76865b86a req-02a780bd-c790-4d53-bef3-6370534e99bc service nova] Acquiring lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1830.495283] env[62619]: DEBUG oslo_concurrency.lockutils [req-c834d6b1-eca4-4857-8e9c-d4b76865b86a req-02a780bd-c790-4d53-bef3-6370534e99bc service nova] Acquired lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1830.495283] env[62619]: DEBUG nova.network.neutron [req-c834d6b1-eca4-4857-8e9c-d4b76865b86a req-02a780bd-c790-4d53-bef3-6370534e99bc service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Refreshing network info cache for port 0364c1be-595c-4984-9173-39fd5163c9ad {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1830.607232] env[62619]: DEBUG nova.compute.utils [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1830.636641] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e4e209-589b-5bfe-ff7f-00ef3331450e, 'name': SearchDatastore_Task, 'duration_secs': 0.014441} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.639878] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.640157] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 1d40d434-fa8f-463e-908a-24c61538fe33/1d40d434-fa8f-463e-908a-24c61538fe33.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1830.640877] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94e914bb-1220-4c04-aedf-2d21347eddbe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.645281] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f86a74b-85e8-4721-86f8-a55b55aefff8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1830.645281] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d30281d7-7207-4fa8-87b1-2ca2277e91ca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.650308] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1830.650308] env[62619]: value = "task-1778398" [ 1830.650308] env[62619]: _type = "Task" [ 1830.650308] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.651613] env[62619]: DEBUG oslo_vmware.api [None req-9f86a74b-85e8-4721-86f8-a55b55aefff8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1830.651613] env[62619]: value = "task-1778399" [ 1830.651613] env[62619]: _type = "Task" [ 1830.651613] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.666437] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778398, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.667794] env[62619]: DEBUG oslo_vmware.api [None req-9f86a74b-85e8-4721-86f8-a55b55aefff8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778399, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.833014] env[62619]: DEBUG nova.scheduler.client.report [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1830.846881] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778397, 'name': Rename_Task, 'duration_secs': 0.155219} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.847212] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1830.847430] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b6377e97-f6e1-484d-aaa8-c3564859177d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.853582] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1830.853582] env[62619]: value = "task-1778400" [ 1830.853582] env[62619]: _type = "Task" [ 1830.853582] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.862985] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778400, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.111045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "52b29fef-eab6-4541-a570-af9c0c021a75" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.165732] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778398, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.172493] env[62619]: DEBUG oslo_vmware.api [None req-9f86a74b-85e8-4721-86f8-a55b55aefff8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778399, 'name': PowerOffVM_Task, 'duration_secs': 0.248129} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.172804] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f86a74b-85e8-4721-86f8-a55b55aefff8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1831.173014] env[62619]: DEBUG nova.compute.manager [None req-9f86a74b-85e8-4721-86f8-a55b55aefff8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1831.174224] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eed9d81-af8d-4c3f-b0d3-82ba52d17142 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.178781] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1831.178781] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e10cd91a-cbb8-4454-8f0c-32e132e7d45d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.187660] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1831.187660] env[62619]: value = "task-1778401" [ 1831.187660] env[62619]: _type = "Task" [ 1831.187660] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.199502] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778401, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.290499] env[62619]: DEBUG nova.network.neutron [req-c834d6b1-eca4-4857-8e9c-d4b76865b86a req-02a780bd-c790-4d53-bef3-6370534e99bc service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Updated VIF entry in instance network info cache for port 0364c1be-595c-4984-9173-39fd5163c9ad. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1831.290876] env[62619]: DEBUG nova.network.neutron [req-c834d6b1-eca4-4857-8e9c-d4b76865b86a req-02a780bd-c790-4d53-bef3-6370534e99bc service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Updating instance_info_cache with network_info: [{"id": "0364c1be-595c-4984-9173-39fd5163c9ad", "address": "fa:16:3e:d1:3c:33", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0364c1be-59", "ovs_interfaceid": "0364c1be-595c-4984-9173-39fd5163c9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1831.344736] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.337s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.344736] env[62619]: DEBUG nova.compute.manager [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1831.347034] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 10.131s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.347452] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.347786] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1831.348207] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.657s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.350205] env[62619]: INFO nova.compute.claims [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1831.353884] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f63e20-9646-4d58-8ec4-97d142346849 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.370380] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fd2962-2b7e-4e43-ae8a-bd37db839bbb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.374882] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778400, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.390637] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e972a7d-8d1e-4f9e-98e5-daa18e1601f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.400337] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72ff482-4019-49c8-8fac-66ff103e705f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.434031] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179373MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1831.434205] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.669530] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778398, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.639177} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.669932] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 1d40d434-fa8f-463e-908a-24c61538fe33/1d40d434-fa8f-463e-908a-24c61538fe33.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1831.670747] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1831.670747] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4aa0d835-0db6-4523-bc6f-f23da83d2ccf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.677952] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1831.677952] env[62619]: value = "task-1778402" [ 1831.677952] env[62619]: _type = "Task" [ 1831.677952] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.687312] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778402, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.694860] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f86a74b-85e8-4721-86f8-a55b55aefff8 tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "54da64a0-4acf-4025-9b51-7af61dbd55fc" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.067s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.702118] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778401, 'name': PowerOffVM_Task, 'duration_secs': 0.259556} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.702118] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1831.702118] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1831.702376] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1831.702465] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1831.702641] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1831.702837] env[62619]: DEBUG 
nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1831.702996] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1831.703235] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1831.703450] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1831.703628] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1831.703800] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1831.703981] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1831.714065] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec319028-b52a-45df-b08b-3dfcd0c4356e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.737023] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1831.737023] env[62619]: value = "task-1778403" [ 1831.737023] env[62619]: _type = "Task" [ 1831.737023] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.747544] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778403, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.794159] env[62619]: DEBUG oslo_concurrency.lockutils [req-c834d6b1-eca4-4857-8e9c-d4b76865b86a req-02a780bd-c790-4d53-bef3-6370534e99bc service nova] Releasing lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1831.854920] env[62619]: DEBUG nova.compute.utils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1831.856415] env[62619]: DEBUG nova.compute.manager [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1831.856588] env[62619]: DEBUG nova.network.neutron [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1831.874373] env[62619]: DEBUG oslo_vmware.api [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778400, 'name': PowerOnVM_Task, 'duration_secs': 0.622147} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.874658] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1831.874876] env[62619]: INFO nova.compute.manager [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Took 7.91 seconds to spawn the instance on the hypervisor. 
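
The nova.virt.hardware records above show the m1.nano flavor (1 vCPU) being checked against wide-open limits (sockets=65536, cores=65536, threads=65536) and ending up with the single possible topology 1:1:1. As a rough illustrative sketch only, the snippet below enumerates the sockets*cores*threads factorizations of a vCPU count within such limits; it assumes nothing beyond what the log prints and is not Nova's nova/virt/hardware.py implementation, though it reproduces the logged result for one vCPU.

# Sketch: enumerate candidate CPU topologies for a vCPU count under
# per-dimension limits, mirroring the "Possible topologies" log line.
from collections import namedtuple

Topology = namedtuple("Topology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Return all sockets*cores*threads factorizations of vcpus in limits."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        remaining = vcpus // sockets
        for cores in range(1, min(remaining, max_cores) + 1):
            if remaining % cores:
                continue
            threads = remaining // cores
            if threads <= max_threads:
                found.append(Topology(sockets, cores, threads))
    return found

# For the 1-vCPU flavor in the log this yields the single topology 1:1:1.
print(possible_topologies(1))
# [Topology(sockets=1, cores=1, threads=1)]
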
[ 1831.875068] env[62619]: DEBUG nova.compute.manager [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1831.878750] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8b9580-62b6-4d38-bc02-4c01438491d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.928314] env[62619]: DEBUG nova.policy [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cfeec8beaab74577a128c470fffd5882', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a13f69e23df4b36a08f2f0a90734f6a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1832.191108] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778402, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088532} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.191108] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1832.192427] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cad0a5f-6f04-4881-8164-f1056e44c871 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.222040] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 1d40d434-fa8f-463e-908a-24c61538fe33/1d40d434-fa8f-463e-908a-24c61538fe33.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1832.222888] env[62619]: DEBUG nova.network.neutron [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Successfully created port: ffd85085-7cdb-4167-99cf-28e1267cf58b {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1832.224833] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58b79f97-5ca2-43f2-8398-859f42320648 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.242115] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "52b29fef-eab6-4541-a570-af9c0c021a75" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.242115] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "52b29fef-eab6-4541-a570-af9c0c021a75" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.242115] env[62619]: INFO nova.compute.manager [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Attaching volume 292d6452-98d6-460a-b9d5-6f63c5392ab1 to /dev/sdb [ 1832.253659] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778403, 'name': ReconfigVM_Task, 'duration_secs': 0.16185} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.255105] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1832.255105] env[62619]: value = "task-1778404" [ 1832.255105] env[62619]: _type = "Task" [ 1832.255105] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.255947] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03bc045c-ddd6-4ca8-a771-c53c80578b08 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.282800] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1832.283151] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1832.283386] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1832.283646] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1832.283906] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1832.284146] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1832.284481] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1832.284718] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1832.285823] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1832.285823] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1832.285823] env[62619]: DEBUG nova.virt.hardware [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1832.291009] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a24f5ed-fa2e-41c6-b828-1d971f9980f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.294177] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b7add6-9579-4261-a872-5012a36940a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.297229] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778404, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.303383] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d061279-3126-48fc-b351-78f88bfcb8c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.306382] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1832.306382] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c51046-119c-b028-6c61-2efc9d5f1132" [ 1832.306382] env[62619]: _type = "Task" [ 1832.306382] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.315578] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c51046-119c-b028-6c61-2efc9d5f1132, 'name': SearchDatastore_Task, 'duration_secs': 0.008194} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.324913] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Reconfiguring VM instance instance-00000054 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1832.325685] env[62619]: DEBUG nova.virt.block_device [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Updating existing volume attachment record: 440051e2-5b38-4a6e-bc7b-a57d21455ee5 {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1832.329853] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-becb7acf-bfe7-4ee4-ac8d-29027f260b1c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.347618] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1832.347618] env[62619]: value = "task-1778405" [ 1832.347618] env[62619]: _type = "Task" [ 1832.347618] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.356620] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778405, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.360267] env[62619]: DEBUG nova.compute.manager [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1832.395018] env[62619]: INFO nova.compute.manager [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Took 28.46 seconds to build instance. 
[ 1832.666808] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51df221c-b85e-4140-b359-49a495dfe4eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.674971] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf77f15-2e74-43bb-aecf-e022292c6479 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.706939] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89507c5f-42e5-4293-a8ab-850b06c00752 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.718110] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2716947d-b784-4b33-ac78-195bb77b6031 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.734586] env[62619]: DEBUG nova.compute.provider_tree [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1832.770338] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778404, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.859702] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778405, 'name': ReconfigVM_Task, 'duration_secs': 0.3263} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.859993] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Reconfigured VM instance instance-00000054 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1832.860870] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e820a8a7-2840-449d-9c8d-f49dfcda04b0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.904757] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 917960ca-3870-4e4e-aafe-3c6d77cf7c51/917960ca-3870-4e4e-aafe-3c6d77cf7c51.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1832.906474] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55d0b74c-e821-4417-95d9-528508b5f716 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "70265068-1185-4f23-b0b4-ed2378c17a89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.985s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.906779] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-745141c3-41c6-4225-934e-bf32cfa4e165 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.939884] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1832.939884] env[62619]: value = "task-1778409" [ 1832.939884] env[62619]: _type = "Task" [ 1832.939884] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.953350] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778409, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.001622] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "54da64a0-4acf-4025-9b51-7af61dbd55fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.002046] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "54da64a0-4acf-4025-9b51-7af61dbd55fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.002412] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "54da64a0-4acf-4025-9b51-7af61dbd55fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.002744] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "54da64a0-4acf-4025-9b51-7af61dbd55fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.003085] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "54da64a0-4acf-4025-9b51-7af61dbd55fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.006062] env[62619]: INFO nova.compute.manager [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Terminating instance [ 1833.238043] env[62619]: DEBUG nova.scheduler.client.report [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1833.271275] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778404, 
'name': ReconfigVM_Task, 'duration_secs': 0.765748} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.271539] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 1d40d434-fa8f-463e-908a-24c61538fe33/1d40d434-fa8f-463e-908a-24c61538fe33.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1833.272223] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b16c02e7-ea86-4bd7-8f03-2b7853b08069 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.280129] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1833.280129] env[62619]: value = "task-1778410" [ 1833.280129] env[62619]: _type = "Task" [ 1833.280129] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.291909] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778410, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.368974] env[62619]: DEBUG nova.compute.manager [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1833.398109] env[62619]: DEBUG nova.virt.hardware [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1833.398245] env[62619]: DEBUG nova.virt.hardware [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1833.398335] env[62619]: DEBUG nova.virt.hardware [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1833.398619] env[62619]: DEBUG nova.virt.hardware [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1833.398767] env[62619]: DEBUG nova.virt.hardware [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1833.398883] env[62619]: DEBUG nova.virt.hardware [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1833.399118] env[62619]: DEBUG nova.virt.hardware [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1833.399277] env[62619]: DEBUG nova.virt.hardware [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1833.399439] env[62619]: DEBUG nova.virt.hardware [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1833.399600] env[62619]: DEBUG nova.virt.hardware [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1833.399771] env[62619]: DEBUG nova.virt.hardware [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1833.400659] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69af7c0c-76e4-46dd-a595-7243c8721278 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.409078] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3b0ff2-05eb-4205-a251-9daed36f3e5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.448218] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778409, 'name': ReconfigVM_Task, 'duration_secs': 0.294564} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.448503] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 917960ca-3870-4e4e-aafe-3c6d77cf7c51/917960ca-3870-4e4e-aafe-3c6d77cf7c51.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1833.449343] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac0284f-496b-4ac7-a564-c36d5d920550 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.469785] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e389e6e9-ef18-4f59-bbcd-db9505cd6423 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.491399] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170259d3-d53f-44ad-9f8e-b6c3cd83e438 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.494641] env[62619]: DEBUG nova.compute.manager [req-85a3204f-7ceb-4641-b4a6-570a8df7837d req-d977bc9a-abc1-4ead-ac74-5b9189191429 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Received event network-changed-d8de64b8-2687-42e0-91e6-97aa76f28d9f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1833.494828] env[62619]: DEBUG nova.compute.manager [req-85a3204f-7ceb-4641-b4a6-570a8df7837d req-d977bc9a-abc1-4ead-ac74-5b9189191429 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Refreshing instance network info cache due to event network-changed-d8de64b8-2687-42e0-91e6-97aa76f28d9f. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1833.495047] env[62619]: DEBUG oslo_concurrency.lockutils [req-85a3204f-7ceb-4641-b4a6-570a8df7837d req-d977bc9a-abc1-4ead-ac74-5b9189191429 service nova] Acquiring lock "refresh_cache-70265068-1185-4f23-b0b4-ed2378c17a89" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1833.495259] env[62619]: DEBUG oslo_concurrency.lockutils [req-85a3204f-7ceb-4641-b4a6-570a8df7837d req-d977bc9a-abc1-4ead-ac74-5b9189191429 service nova] Acquired lock "refresh_cache-70265068-1185-4f23-b0b4-ed2378c17a89" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1833.495347] env[62619]: DEBUG nova.network.neutron [req-85a3204f-7ceb-4641-b4a6-570a8df7837d req-d977bc9a-abc1-4ead-ac74-5b9189191429 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Refreshing network info cache for port d8de64b8-2687-42e0-91e6-97aa76f28d9f {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1833.517337] env[62619]: DEBUG nova.compute.manager [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1833.517337] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1833.518660] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-224d7d1a-1086-43dc-8035-6aff278a57f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.521695] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa07fa4e-f6ca-458a-8a4f-0d07976b4aa2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.529388] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1833.531307] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5cfbfa5-8f7c-459b-b271-2b15044fe8cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.532965] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1833.533677] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ac53ac0-6735-4d79-a238-70dbca152eec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.539722] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1833.539722] env[62619]: value = "task-1778411" [ 1833.539722] env[62619]: _type = "Task" [ 1833.539722] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.547551] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778411, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.612949] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1833.613216] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1833.613439] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleting the datastore file [datastore1] 54da64a0-4acf-4025-9b51-7af61dbd55fc {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1833.613750] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1a75420-6168-4163-8643-e05d94aa16bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.620908] env[62619]: DEBUG oslo_vmware.api [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1833.620908] env[62619]: value = "task-1778413" [ 1833.620908] env[62619]: _type = "Task" [ 1833.620908] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.629416] env[62619]: DEBUG oslo_vmware.api [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778413, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.742950] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.743523] env[62619]: DEBUG nova.compute.manager [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1833.747708] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.312s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.795417] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778410, 'name': Rename_Task, 'duration_secs': 0.398795} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.796811] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1833.797106] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b19ad66-e2c8-4b2e-b873-05c353fbf0db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.804987] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1833.804987] env[62619]: value = "task-1778414" [ 1833.804987] env[62619]: _type = "Task" [ 1833.804987] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.812279] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778414, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.820201] env[62619]: DEBUG nova.compute.manager [req-3cca99f2-5c06-406c-858f-da59cdf6c527 req-7838bc6e-49b8-45b3-9068-979ddb775d87 service nova] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Received event network-vif-plugged-ffd85085-7cdb-4167-99cf-28e1267cf58b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1833.820449] env[62619]: DEBUG oslo_concurrency.lockutils [req-3cca99f2-5c06-406c-858f-da59cdf6c527 req-7838bc6e-49b8-45b3-9068-979ddb775d87 service nova] Acquiring lock "e1cd6059-ddb0-4f10-a569-e0bc71a63f4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.820847] env[62619]: DEBUG oslo_concurrency.lockutils [req-3cca99f2-5c06-406c-858f-da59cdf6c527 req-7838bc6e-49b8-45b3-9068-979ddb775d87 service nova] Lock "e1cd6059-ddb0-4f10-a569-e0bc71a63f4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.820847] env[62619]: DEBUG oslo_concurrency.lockutils [req-3cca99f2-5c06-406c-858f-da59cdf6c527 req-7838bc6e-49b8-45b3-9068-979ddb775d87 service nova] Lock "e1cd6059-ddb0-4f10-a569-e0bc71a63f4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.821033] env[62619]: DEBUG nova.compute.manager [req-3cca99f2-5c06-406c-858f-da59cdf6c527 req-7838bc6e-49b8-45b3-9068-979ddb775d87 service nova] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] No waiting events found dispatching network-vif-plugged-ffd85085-7cdb-4167-99cf-28e1267cf58b {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1833.821215] env[62619]: WARNING nova.compute.manager [req-3cca99f2-5c06-406c-858f-da59cdf6c527 req-7838bc6e-49b8-45b3-9068-979ddb775d87 service nova] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Received unexpected event network-vif-plugged-ffd85085-7cdb-4167-99cf-28e1267cf58b for instance with vm_state building and task_state spawning. [ 1833.824773] env[62619]: DEBUG nova.network.neutron [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Successfully updated port: ffd85085-7cdb-4167-99cf-28e1267cf58b {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1834.049899] env[62619]: DEBUG oslo_vmware.api [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778411, 'name': PowerOnVM_Task, 'duration_secs': 0.409278} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.050258] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1834.135739] env[62619]: DEBUG oslo_vmware.api [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778413, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256915} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.136069] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1834.136297] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1834.136498] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1834.136699] env[62619]: INFO nova.compute.manager [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1834.136998] env[62619]: DEBUG oslo.service.loopingcall [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1834.137579] env[62619]: DEBUG nova.compute.manager [-] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1834.137739] env[62619]: DEBUG nova.network.neutron [-] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1834.233978] env[62619]: DEBUG nova.network.neutron [req-85a3204f-7ceb-4641-b4a6-570a8df7837d req-d977bc9a-abc1-4ead-ac74-5b9189191429 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Updated VIF entry in instance network info cache for port d8de64b8-2687-42e0-91e6-97aa76f28d9f. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1834.234396] env[62619]: DEBUG nova.network.neutron [req-85a3204f-7ceb-4641-b4a6-570a8df7837d req-d977bc9a-abc1-4ead-ac74-5b9189191429 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Updating instance_info_cache with network_info: [{"id": "d8de64b8-2687-42e0-91e6-97aa76f28d9f", "address": "fa:16:3e:73:56:6e", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8de64b8-26", "ovs_interfaceid": "d8de64b8-2687-42e0-91e6-97aa76f28d9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1834.249541] env[62619]: DEBUG nova.compute.utils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1834.264239] env[62619]: DEBUG nova.compute.manager [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1834.264447] env[62619]: DEBUG nova.network.neutron [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1834.267633] env[62619]: DEBUG nova.compute.manager [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1834.311343] env[62619]: DEBUG nova.policy [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53deb71781e14136bff2b0b6c6a82890', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2c7470712c14fa9bc1804ae2431107b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1834.319444] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778414, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.328527] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Acquiring lock "refresh_cache-e1cd6059-ddb0-4f10-a569-e0bc71a63f4b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.328699] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Acquired lock "refresh_cache-e1cd6059-ddb0-4f10-a569-e0bc71a63f4b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.328856] env[62619]: DEBUG nova.network.neutron [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1834.635511] env[62619]: DEBUG nova.network.neutron [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Successfully created port: b1a6212d-63f4-4343-9100-d88707a89c10 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1834.678119] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "d3aa352b-7d2d-416e-a579-9636619bb025" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.681019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "d3aa352b-7d2d-416e-a579-9636619bb025" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 
0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.681019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "d3aa352b-7d2d-416e-a579-9636619bb025-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.681019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "d3aa352b-7d2d-416e-a579-9636619bb025-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.681019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "d3aa352b-7d2d-416e-a579-9636619bb025-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.683810] env[62619]: INFO nova.compute.manager [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Terminating instance [ 1834.737702] env[62619]: DEBUG oslo_concurrency.lockutils [req-85a3204f-7ceb-4641-b4a6-570a8df7837d req-d977bc9a-abc1-4ead-ac74-5b9189191429 service nova] Releasing lock "refresh_cache-70265068-1185-4f23-b0b4-ed2378c17a89" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1834.818139] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.818242] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e32cb991-a018-4b55-8cdf-378e212c8434 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.818374] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 8745aa7f-9848-4320-94b5-08b7e3bccf80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.818496] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e302e431-1f95-4ab5-bfca-59450fd887f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.818644] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 5cf7ca57-351f-48ab-8758-b30f50cd607f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.818756] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 52b29fef-eab6-4541-a570-af9c0c021a75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.818847] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 917960ca-3870-4e4e-aafe-3c6d77cf7c51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.818962] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance cee0356c-25d7-48ca-be09-16b0e1b56a41 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.819084] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance a250f05d-cd74-436d-b656-2a9e55527809 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.819194] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance d3aa352b-7d2d-416e-a579-9636619bb025 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.819446] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.819513] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 54da64a0-4acf-4025-9b51-7af61dbd55fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.819610] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 70265068-1185-4f23-b0b4-ed2378c17a89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.819779] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 1d40d434-fa8f-463e-908a-24c61538fe33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.819822] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e1cd6059-ddb0-4f10-a569-e0bc71a63f4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.819958] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4c66bbdf-af6a-4705-8219-85cf19f8314e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1834.820668] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1834.820944] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3648MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1834.828812] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778414, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.891595] env[62619]: DEBUG nova.network.neutron [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1834.987463] env[62619]: DEBUG nova.network.neutron [-] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.090329] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf1e5c5-9eb0-43b9-a1cf-10c8f35dcdca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.101555] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8767c3e-778d-401b-b944-bf0bf28a73cd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.144910] env[62619]: INFO nova.compute.manager [None req-2954d61d-adf9-447d-8bbe-25855b24bd24 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance to original state: 'active' [ 1835.152019] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f8c2ae-4c38-4596-8c41-76f374c0f93d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.154882] env[62619]: DEBUG nova.network.neutron [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Updating instance_info_cache with network_info: [{"id": "ffd85085-7cdb-4167-99cf-28e1267cf58b", "address": "fa:16:3e:aa:52:ff", "network": {"id": "01f1c303-0b43-4210-98c3-fbfa0c2dcfde", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1127361670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a13f69e23df4b36a08f2f0a90734f6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffd85085-7c", "ovs_interfaceid": "ffd85085-7cdb-4167-99cf-28e1267cf58b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.162777] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e376cd40-f53c-43c6-9115-f3ce510ee51f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.178671] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1835.188445] env[62619]: DEBUG nova.compute.manager [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1835.188692] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1835.189584] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab2b74f-3e00-437e-a770-00db1f427022 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.198712] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1835.198959] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7148b24a-5845-4aac-b106-5605d8993aef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.205650] env[62619]: DEBUG oslo_vmware.api [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1835.205650] env[62619]: value = "task-1778417" [ 1835.205650] env[62619]: _type = "Task" [ 1835.205650] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.213793] env[62619]: DEBUG oslo_vmware.api [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778417, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.286210] env[62619]: DEBUG nova.compute.manager [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1835.314593] env[62619]: DEBUG nova.virt.hardware [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1835.314912] env[62619]: DEBUG nova.virt.hardware [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1835.315036] env[62619]: DEBUG nova.virt.hardware [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1835.315264] env[62619]: DEBUG nova.virt.hardware [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1835.315430] env[62619]: DEBUG nova.virt.hardware [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1835.315840] env[62619]: DEBUG nova.virt.hardware [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1835.316126] env[62619]: DEBUG nova.virt.hardware [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1835.316315] env[62619]: DEBUG nova.virt.hardware [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1835.316569] env[62619]: DEBUG nova.virt.hardware [None 
req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1835.316792] env[62619]: DEBUG nova.virt.hardware [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1835.317034] env[62619]: DEBUG nova.virt.hardware [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1835.318144] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e191c5f-2b76-47e1-a02b-d4fc3c54969c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.324142] env[62619]: DEBUG oslo_vmware.api [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778414, 'name': PowerOnVM_Task, 'duration_secs': 1.348558} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.324789] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1835.325039] env[62619]: INFO nova.compute.manager [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Took 9.01 seconds to spawn the instance on the hypervisor. [ 1835.325244] env[62619]: DEBUG nova.compute.manager [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1835.326120] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c9cbc8-7e13-45bb-937a-1f0498793040 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.334938] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3fb971-aa01-4396-ad72-af2edc1f085f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.490763] env[62619]: INFO nova.compute.manager [-] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Took 1.35 seconds to deallocate network for instance. 
[ 1835.591787] env[62619]: DEBUG nova.compute.manager [req-d170de61-c2a9-4452-85b2-9fbfafbcf3a9 req-56e139a7-3a93-4030-80c2-d0edd7e6bb9d service nova] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Received event network-vif-deleted-963179af-510c-47e9-a81a-2f99f2055e2c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1835.661027] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Releasing lock "refresh_cache-e1cd6059-ddb0-4f10-a569-e0bc71a63f4b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.661027] env[62619]: DEBUG nova.compute.manager [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Instance network_info: |[{"id": "ffd85085-7cdb-4167-99cf-28e1267cf58b", "address": "fa:16:3e:aa:52:ff", "network": {"id": "01f1c303-0b43-4210-98c3-fbfa0c2dcfde", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1127361670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a13f69e23df4b36a08f2f0a90734f6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffd85085-7c", "ovs_interfaceid": "ffd85085-7cdb-4167-99cf-28e1267cf58b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1835.663789] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:52:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c791d09c-1086-4ee1-bcde-6ca7d259cabd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ffd85085-7cdb-4167-99cf-28e1267cf58b', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1835.671681] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Creating folder: Project (0a13f69e23df4b36a08f2f0a90734f6a). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1835.672317] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7841371c-ae78-46f9-b8db-42aedab35d90 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.683026] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1835.686787] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Created folder: Project (0a13f69e23df4b36a08f2f0a90734f6a) in parent group-v368875. [ 1835.686968] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Creating folder: Instances. Parent ref: group-v369159. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1835.687430] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a6028e0-1ffc-4f56-a1fb-a4e48884dc28 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.699134] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Created folder: Instances in parent group-v369159. [ 1835.699134] env[62619]: DEBUG oslo.service.loopingcall [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1835.699134] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1835.699134] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac182ac7-d14e-45a6-ab49-d535ca66d595 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.723734] env[62619]: DEBUG oslo_vmware.api [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778417, 'name': PowerOffVM_Task, 'duration_secs': 0.276872} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.725215] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1835.725442] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1835.725621] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1835.725621] env[62619]: value = "task-1778420" [ 1835.725621] env[62619]: _type = "Task" [ 1835.725621] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.725804] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a6014bc-cb12-43c1-af7d-b15a9f4b0783 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.737260] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778420, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.855866] env[62619]: INFO nova.compute.manager [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Took 26.66 seconds to build instance. [ 1835.927111] env[62619]: DEBUG nova.compute.manager [req-88d3b942-118d-47bd-a15f-314ba23bd9ea req-11507959-57a9-42c9-928a-ae17a33b7855 service nova] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Received event network-changed-ffd85085-7cdb-4167-99cf-28e1267cf58b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1835.927281] env[62619]: DEBUG nova.compute.manager [req-88d3b942-118d-47bd-a15f-314ba23bd9ea req-11507959-57a9-42c9-928a-ae17a33b7855 service nova] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Refreshing instance network info cache due to event network-changed-ffd85085-7cdb-4167-99cf-28e1267cf58b. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1835.927431] env[62619]: DEBUG oslo_concurrency.lockutils [req-88d3b942-118d-47bd-a15f-314ba23bd9ea req-11507959-57a9-42c9-928a-ae17a33b7855 service nova] Acquiring lock "refresh_cache-e1cd6059-ddb0-4f10-a569-e0bc71a63f4b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.927580] env[62619]: DEBUG oslo_concurrency.lockutils [req-88d3b942-118d-47bd-a15f-314ba23bd9ea req-11507959-57a9-42c9-928a-ae17a33b7855 service nova] Acquired lock "refresh_cache-e1cd6059-ddb0-4f10-a569-e0bc71a63f4b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.927788] env[62619]: DEBUG nova.network.neutron [req-88d3b942-118d-47bd-a15f-314ba23bd9ea req-11507959-57a9-42c9-928a-ae17a33b7855 service nova] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Refreshing network info cache for port ffd85085-7cdb-4167-99cf-28e1267cf58b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1835.972653] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1835.972871] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1835.973061] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Deleting the datastore file [datastore1] d3aa352b-7d2d-416e-a579-9636619bb025 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1835.973326] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88d14aaf-628a-4108-a5e8-86bdea977ca9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.980133] env[62619]: DEBUG oslo_vmware.api [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1835.980133] env[62619]: value = "task-1778422" [ 1835.980133] env[62619]: _type = "Task" [ 1835.980133] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.988360] env[62619]: DEBUG oslo_vmware.api [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778422, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.996842] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.188790] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1836.189037] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.443s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.189586] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.193s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.189817] env[62619]: DEBUG nova.objects.instance [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lazy-loading 'resources' on Instance uuid 54da64a0-4acf-4025-9b51-7af61dbd55fc {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1836.238165] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778420, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.262256] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.262609] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.262861] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.263098] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.263319] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.265618] env[62619]: INFO nova.compute.manager [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Terminating instance [ 1836.280594] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.280825] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.357615] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b63160b9-a666-46e2-8225-e63c99346dec tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "1d40d434-fa8f-463e-908a-24c61538fe33" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.165s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.493771] env[62619]: DEBUG oslo_vmware.api [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778422, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.258207} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.493771] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1836.493771] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1836.493771] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1836.493771] env[62619]: INFO nova.compute.manager [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1836.493771] env[62619]: DEBUG oslo.service.loopingcall [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1836.493771] env[62619]: DEBUG nova.compute.manager [-] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1836.493771] env[62619]: DEBUG nova.network.neutron [-] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1836.638918] env[62619]: DEBUG nova.network.neutron [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Successfully updated port: b1a6212d-63f4-4343-9100-d88707a89c10 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1836.676736] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ef4703d9-7efc-4f28-b83b-c41c1bac2a1b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "1d40d434-fa8f-463e-908a-24c61538fe33" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.677013] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ef4703d9-7efc-4f28-b83b-c41c1bac2a1b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "1d40d434-fa8f-463e-908a-24c61538fe33" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.677203] env[62619]: DEBUG nova.compute.manager [None req-ef4703d9-7efc-4f28-b83b-c41c1bac2a1b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1836.678435] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa8a8ec-e08d-4f1e-a042-924818013c3b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.691063] env[62619]: DEBUG nova.compute.manager [None req-ef4703d9-7efc-4f28-b83b-c41c1bac2a1b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1836.691063] env[62619]: DEBUG nova.objects.instance [None req-ef4703d9-7efc-4f28-b83b-c41c1bac2a1b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lazy-loading 'flavor' on Instance uuid 1d40d434-fa8f-463e-908a-24c61538fe33 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1836.739733] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778420, 'name': CreateVM_Task, 'duration_secs': 0.547197} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.742437] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1836.743836] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.743836] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1836.744108] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1836.744349] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17bd6e05-8559-4c0b-ae0f-df308c9c28da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.749658] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Waiting for the task: (returnval){ [ 1836.749658] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523a89ed-50d4-8454-f1a4-470d42d2d058" [ 1836.749658] env[62619]: _type = "Task" [ 1836.749658] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.761413] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523a89ed-50d4-8454-f1a4-470d42d2d058, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.761674] env[62619]: DEBUG nova.network.neutron [req-88d3b942-118d-47bd-a15f-314ba23bd9ea req-11507959-57a9-42c9-928a-ae17a33b7855 service nova] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Updated VIF entry in instance network info cache for port ffd85085-7cdb-4167-99cf-28e1267cf58b. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1836.761893] env[62619]: DEBUG nova.network.neutron [req-88d3b942-118d-47bd-a15f-314ba23bd9ea req-11507959-57a9-42c9-928a-ae17a33b7855 service nova] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Updating instance_info_cache with network_info: [{"id": "ffd85085-7cdb-4167-99cf-28e1267cf58b", "address": "fa:16:3e:aa:52:ff", "network": {"id": "01f1c303-0b43-4210-98c3-fbfa0c2dcfde", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1127361670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a13f69e23df4b36a08f2f0a90734f6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffd85085-7c", "ovs_interfaceid": "ffd85085-7cdb-4167-99cf-28e1267cf58b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.769398] env[62619]: DEBUG nova.compute.manager [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1836.769610] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1836.769873] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e3fc0570-073c-4b1e-a4ad-946e9d447c4e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.779704] env[62619]: DEBUG oslo_vmware.api [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1836.779704] env[62619]: value = "task-1778423" [ 1836.779704] env[62619]: _type = "Task" [ 1836.779704] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.784097] env[62619]: INFO nova.compute.manager [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Detaching volume ac33ba75-c333-4e12-8448-75caf34bd9c5 [ 1836.795487] env[62619]: DEBUG oslo_vmware.api [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778423, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.820155] env[62619]: INFO nova.virt.block_device [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Attempting to driver detach volume ac33ba75-c333-4e12-8448-75caf34bd9c5 from mountpoint /dev/sdb [ 1836.820410] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Volume detach. Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1836.820578] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369133', 'volume_id': 'ac33ba75-c333-4e12-8448-75caf34bd9c5', 'name': 'volume-ac33ba75-c333-4e12-8448-75caf34bd9c5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '5cf7ca57-351f-48ab-8758-b30f50cd607f', 'attached_at': '', 'detached_at': '', 'volume_id': 'ac33ba75-c333-4e12-8448-75caf34bd9c5', 'serial': 'ac33ba75-c333-4e12-8448-75caf34bd9c5'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1836.821443] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3b1d60-eaca-4c10-b517-16c52aafc79a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.858411] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd592c2-cef4-44ff-944f-dfea839c822b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.866660] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68bce3b6-6b7c-4432-b815-af69a2ec7b60 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.893051] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7815326-41b1-42a6-8e17-f843097739fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.896848] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Volume attach. Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1836.897090] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369157', 'volume_id': '292d6452-98d6-460a-b9d5-6f63c5392ab1', 'name': 'volume-292d6452-98d6-460a-b9d5-6f63c5392ab1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '52b29fef-eab6-4541-a570-af9c0c021a75', 'attached_at': '', 'detached_at': '', 'volume_id': '292d6452-98d6-460a-b9d5-6f63c5392ab1', 'serial': '292d6452-98d6-460a-b9d5-6f63c5392ab1'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1836.898224] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ee9089-5f61-4016-9e9f-6694828dd341 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.911684] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] The volume has not been displaced from its original location: [datastore1] volume-ac33ba75-c333-4e12-8448-75caf34bd9c5/volume-ac33ba75-c333-4e12-8448-75caf34bd9c5.vmdk. No consolidation needed. {{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1836.918202] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Reconfiguring VM instance instance-00000043 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1836.920311] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ca20fa5-6e45-4e0c-bdbc-a9dd799eb7ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.947324] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a1e9fa-5590-4e80-9992-16ad55b294e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.951758] env[62619]: DEBUG oslo_vmware.api [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1836.951758] env[62619]: value = "task-1778424" [ 1836.951758] env[62619]: _type = "Task" [ 1836.951758] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.976409] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] volume-292d6452-98d6-460a-b9d5-6f63c5392ab1/volume-292d6452-98d6-460a-b9d5-6f63c5392ab1.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1836.979859] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61016e8f-fb9e-47f1-81ed-b064370b0089 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.998909] env[62619]: DEBUG oslo_vmware.api [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778424, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.003744] env[62619]: DEBUG oslo_vmware.api [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1837.003744] env[62619]: value = "task-1778425" [ 1837.003744] env[62619]: _type = "Task" [ 1837.003744] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.013578] env[62619]: DEBUG oslo_vmware.api [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778425, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.072856] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ee8b84-4d7f-4298-9dc4-bf2d07210694 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.081445] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4491d152-6a62-4da4-9eda-158e8acd8b94 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.113180] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84916f43-57d9-487c-bf1c-210d9655d34c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.120621] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c9efed-460b-4a9d-923c-bf9bbfabf2cd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.134486] env[62619]: DEBUG nova.compute.provider_tree [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1837.142354] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1837.142966] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.142966] env[62619]: DEBUG nova.network.neutron [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1837.190800] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1837.190800] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1837.190800] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1837.265474] env[62619]: DEBUG 
oslo_concurrency.lockutils [req-88d3b942-118d-47bd-a15f-314ba23bd9ea req-11507959-57a9-42c9-928a-ae17a33b7855 service nova] Releasing lock "refresh_cache-e1cd6059-ddb0-4f10-a569-e0bc71a63f4b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.265474] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523a89ed-50d4-8454-f1a4-470d42d2d058, 'name': SearchDatastore_Task, 'duration_secs': 0.034188} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.265474] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.265858] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1837.266100] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1837.266336] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.266609] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1837.266957] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8ebd669-3b7d-4c9d-b009-0ce8342dc1b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.278042] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1837.278244] env[62619]: DEBUG 
nova.virt.vmwareapi.vmops [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1837.279058] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41913dac-2aa6-4b33-a666-04559e19989e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.290330] env[62619]: DEBUG oslo_vmware.api [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778423, 'name': PowerOffVM_Task, 'duration_secs': 0.217633} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.291728] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1837.291933] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Volume detach. Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1837.292141] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369135', 'volume_id': '35d065ad-5735-4fa5-b238-762eaceed4e2', 'name': 'volume-35d065ad-5735-4fa5-b238-762eaceed4e2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '917960ca-3870-4e4e-aafe-3c6d77cf7c51', 'attached_at': '2024-12-11T22:56:52.000000', 'detached_at': '', 'volume_id': '35d065ad-5735-4fa5-b238-762eaceed4e2', 'serial': '35d065ad-5735-4fa5-b238-762eaceed4e2'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1837.292476] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Waiting for the task: (returnval){ [ 1837.292476] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b5a04b-5795-138b-a5cb-b65509b4ed31" [ 1837.292476] env[62619]: _type = "Task" [ 1837.292476] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.293187] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd14b343-83f5-48d4-ad2d-2f5f460ace91 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.328256] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b5a04b-5795-138b-a5cb-b65509b4ed31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.328791] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb67d5a3-40c4-485b-a045-69069ae196ed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.339299] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de88ea6-ae97-4250-a611-cc8e1a20a91c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.370486] env[62619]: DEBUG nova.network.neutron [-] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1837.372339] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114fc05d-0c82-407a-9ee2-a6c185f33a7a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.389270] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] The volume has not been displaced from its original location: [datastore1] volume-35d065ad-5735-4fa5-b238-762eaceed4e2/volume-35d065ad-5735-4fa5-b238-762eaceed4e2.vmdk. No consolidation needed. {{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1837.394259] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Reconfiguring VM instance instance-00000054 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1837.395276] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68605a5b-24f2-413a-aa5c-09fdec577b0e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.414089] env[62619]: DEBUG oslo_vmware.api [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1837.414089] env[62619]: value = "task-1778426" [ 1837.414089] env[62619]: _type = "Task" [ 1837.414089] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.422309] env[62619]: DEBUG oslo_vmware.api [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778426, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.462588] env[62619]: DEBUG oslo_vmware.api [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778424, 'name': ReconfigVM_Task, 'duration_secs': 0.343291} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.462983] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Reconfigured VM instance instance-00000043 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1837.468472] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1977e1d7-d5ea-458e-9366-b073bae46b7f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.485142] env[62619]: DEBUG oslo_vmware.api [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1837.485142] env[62619]: value = "task-1778427" [ 1837.485142] env[62619]: _type = "Task" [ 1837.485142] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.493522] env[62619]: DEBUG oslo_vmware.api [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778427, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.514218] env[62619]: DEBUG oslo_vmware.api [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778425, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.616930] env[62619]: DEBUG nova.compute.manager [req-3116ba14-8672-455f-a3bd-49db4cf4fabe req-89c2ff80-6832-4ed6-9e95-5f48ed7a98f0 service nova] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Received event network-vif-deleted-67553e4f-60e3-4b66-acde-8a299ea8545b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1837.637592] env[62619]: DEBUG nova.scheduler.client.report [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1837.674252] env[62619]: DEBUG nova.network.neutron [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1837.698842] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef4703d9-7efc-4f28-b83b-c41c1bac2a1b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1837.698842] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27e16e36-5c6e-41da-97a9-38127c59d860 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.707815] env[62619]: DEBUG oslo_vmware.api [None req-ef4703d9-7efc-4f28-b83b-c41c1bac2a1b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1837.707815] env[62619]: value = "task-1778428" [ 1837.707815] env[62619]: _type = "Task" [ 1837.707815] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.717533] env[62619]: DEBUG oslo_vmware.api [None req-ef4703d9-7efc-4f28-b83b-c41c1bac2a1b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778428, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.738987] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "refresh_cache-e32cb991-a018-4b55-8cdf-378e212c8434" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1837.738987] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquired lock "refresh_cache-e32cb991-a018-4b55-8cdf-378e212c8434" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.739175] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Forcefully refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1837.808773] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b5a04b-5795-138b-a5cb-b65509b4ed31, 'name': SearchDatastore_Task, 'duration_secs': 0.022708} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.809806] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e174c104-e560-4efb-a67a-2a8b0f181691 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.818053] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Waiting for the task: (returnval){ [ 1837.818053] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523e8421-0985-07df-99fe-a5d8b33f2de0" [ 1837.818053] env[62619]: _type = "Task" [ 1837.818053] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.826484] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523e8421-0985-07df-99fe-a5d8b33f2de0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.864404] env[62619]: DEBUG nova.network.neutron [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance_info_cache with network_info: [{"id": "b1a6212d-63f4-4343-9100-d88707a89c10", "address": "fa:16:3e:48:b2:0f", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a6212d-63", "ovs_interfaceid": "b1a6212d-63f4-4343-9100-d88707a89c10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1837.876618] env[62619]: INFO nova.compute.manager [-] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Took 1.38 seconds to deallocate network for instance. [ 1837.925207] env[62619]: DEBUG oslo_vmware.api [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778426, 'name': ReconfigVM_Task, 'duration_secs': 0.200572} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.925772] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Reconfigured VM instance instance-00000054 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1837.931993] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-809602b2-77fa-4e0e-8478-4b854e83c49b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.957898] env[62619]: DEBUG nova.compute.manager [req-78abe7b9-5059-470b-9266-c6cd23f3b801 req-422f21f5-67ed-4de4-be58-c92a87de2f25 service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Received event network-vif-plugged-b1a6212d-63f4-4343-9100-d88707a89c10 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1837.958106] env[62619]: DEBUG oslo_concurrency.lockutils [req-78abe7b9-5059-470b-9266-c6cd23f3b801 req-422f21f5-67ed-4de4-be58-c92a87de2f25 service nova] Acquiring lock "4c66bbdf-af6a-4705-8219-85cf19f8314e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.958367] env[62619]: DEBUG oslo_concurrency.lockutils [req-78abe7b9-5059-470b-9266-c6cd23f3b801 req-422f21f5-67ed-4de4-be58-c92a87de2f25 service nova] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.958493] env[62619]: DEBUG oslo_concurrency.lockutils [req-78abe7b9-5059-470b-9266-c6cd23f3b801 req-422f21f5-67ed-4de4-be58-c92a87de2f25 service nova] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.958739] env[62619]: DEBUG nova.compute.manager [req-78abe7b9-5059-470b-9266-c6cd23f3b801 req-422f21f5-67ed-4de4-be58-c92a87de2f25 service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] No waiting events found dispatching network-vif-plugged-b1a6212d-63f4-4343-9100-d88707a89c10 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1837.958832] env[62619]: WARNING nova.compute.manager [req-78abe7b9-5059-470b-9266-c6cd23f3b801 req-422f21f5-67ed-4de4-be58-c92a87de2f25 service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Received unexpected event network-vif-plugged-b1a6212d-63f4-4343-9100-d88707a89c10 for instance with vm_state building and task_state spawning. 
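The entries above all follow the same shape: a vCenter task is started (ReconfigVM_Task, SearchDatastore_Task, PowerOffVM_Task, CopyVirtualDisk_Task), then polled until it reports completion together with a duration_secs value. Purely as an illustration of that poll-until-done pattern — not the actual oslo.vmware or Nova code — here is a minimal, self-contained Python sketch; `TaskInfo` and `poll_task_info` are hypothetical stand-ins for the task state a PropertyCollector round trip would return.

```python
import time
from dataclasses import dataclass


# Hypothetical stand-in for the vCenter task info the driver polls for.
@dataclass
class TaskInfo:
    state: str            # "running", "success", or "error"
    progress: int = 0     # percent complete, as logged ("progress is 14%")
    error: str | None = None


def wait_for_task(poll_task_info, interval=0.5, timeout=300.0):
    """Poll a task until it finishes, mirroring the log lines above.

    `poll_task_info` is any callable returning a TaskInfo; in the real
    driver this would be a SOAP call to vCenter, not a local function.
    Returns the elapsed time, analogous to the logged 'duration_secs'.
    """
    start = time.monotonic()
    while True:
        info = poll_task_info()
        if info.state == "success":
            return time.monotonic() - start
        if info.state == "error":
            raise RuntimeError(info.error or "task failed")
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete in time")
        # Matches the periodic "... progress is N%." entries in the trace.
        print(f"progress is {info.progress}%")
        time.sleep(interval)
```

In the trace, each wait is interleaved with unrelated work from other request contexts (req-...), which is why several tasks (task-1778424 through task-1778428) advance in the same time window.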
[ 1837.958971] env[62619]: DEBUG nova.compute.manager [req-78abe7b9-5059-470b-9266-c6cd23f3b801 req-422f21f5-67ed-4de4-be58-c92a87de2f25 service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Received event network-changed-b1a6212d-63f4-4343-9100-d88707a89c10 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1837.959176] env[62619]: DEBUG nova.compute.manager [req-78abe7b9-5059-470b-9266-c6cd23f3b801 req-422f21f5-67ed-4de4-be58-c92a87de2f25 service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Refreshing instance network info cache due to event network-changed-b1a6212d-63f4-4343-9100-d88707a89c10. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1837.959339] env[62619]: DEBUG oslo_concurrency.lockutils [req-78abe7b9-5059-470b-9266-c6cd23f3b801 req-422f21f5-67ed-4de4-be58-c92a87de2f25 service nova] Acquiring lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1837.961232] env[62619]: DEBUG oslo_vmware.api [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1837.961232] env[62619]: value = "task-1778430" [ 1837.961232] env[62619]: _type = "Task" [ 1837.961232] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.971416] env[62619]: DEBUG oslo_vmware.api [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778430, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.995075] env[62619]: DEBUG oslo_vmware.api [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778427, 'name': ReconfigVM_Task, 'duration_secs': 0.149952} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.995425] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369133', 'volume_id': 'ac33ba75-c333-4e12-8448-75caf34bd9c5', 'name': 'volume-ac33ba75-c333-4e12-8448-75caf34bd9c5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '5cf7ca57-351f-48ab-8758-b30f50cd607f', 'attached_at': '', 'detached_at': '', 'volume_id': 'ac33ba75-c333-4e12-8448-75caf34bd9c5', 'serial': 'ac33ba75-c333-4e12-8448-75caf34bd9c5'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1838.017166] env[62619]: DEBUG oslo_vmware.api [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778425, 'name': ReconfigVM_Task, 'duration_secs': 0.587361} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.017621] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Reconfigured VM instance instance-0000005f to attach disk [datastore1] volume-292d6452-98d6-460a-b9d5-6f63c5392ab1/volume-292d6452-98d6-460a-b9d5-6f63c5392ab1.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1838.024733] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71823dc8-87b3-48da-9dfc-4cf82fe2cfa8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.041158] env[62619]: DEBUG oslo_vmware.api [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1838.041158] env[62619]: value = "task-1778431" [ 1838.041158] env[62619]: _type = "Task" [ 1838.041158] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.049461] env[62619]: DEBUG oslo_vmware.api [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778431, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.144836] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.953s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.173468] env[62619]: INFO nova.scheduler.client.report [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleted allocations for instance 54da64a0-4acf-4025-9b51-7af61dbd55fc [ 1838.222340] env[62619]: DEBUG oslo_vmware.api [None req-ef4703d9-7efc-4f28-b83b-c41c1bac2a1b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778428, 'name': PowerOffVM_Task, 'duration_secs': 0.316169} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.222782] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef4703d9-7efc-4f28-b83b-c41c1bac2a1b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1838.223162] env[62619]: DEBUG nova.compute.manager [None req-ef4703d9-7efc-4f28-b83b-c41c1bac2a1b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1838.224498] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7281492-aa0d-4b83-bcc6-426cdb2f934d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.328387] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523e8421-0985-07df-99fe-a5d8b33f2de0, 'name': SearchDatastore_Task, 'duration_secs': 0.019793} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.328702] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1838.329100] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e1cd6059-ddb0-4f10-a569-e0bc71a63f4b/e1cd6059-ddb0-4f10-a569-e0bc71a63f4b.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1838.329393] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-71dec3b1-162c-4443-b06e-c78fd1b59904 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.336251] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Waiting for the task: (returnval){ [ 1838.336251] env[62619]: value = "task-1778432" [ 1838.336251] env[62619]: _type = "Task" [ 1838.336251] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.343707] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778432, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.367145] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1838.367527] env[62619]: DEBUG nova.compute.manager [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Instance network_info: |[{"id": "b1a6212d-63f4-4343-9100-d88707a89c10", "address": "fa:16:3e:48:b2:0f", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a6212d-63", "ovs_interfaceid": "b1a6212d-63f4-4343-9100-d88707a89c10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1838.367866] env[62619]: DEBUG oslo_concurrency.lockutils [req-78abe7b9-5059-470b-9266-c6cd23f3b801 req-422f21f5-67ed-4de4-be58-c92a87de2f25 service nova] Acquired lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.368096] env[62619]: DEBUG nova.network.neutron [req-78abe7b9-5059-470b-9266-c6cd23f3b801 req-422f21f5-67ed-4de4-be58-c92a87de2f25 service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Refreshing network info cache for port b1a6212d-63f4-4343-9100-d88707a89c10 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1838.369389] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:b2:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cebc48c-6a77-46bf-9c12-ac130e4d7d76', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'b1a6212d-63f4-4343-9100-d88707a89c10', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1838.377227] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Creating folder: Project (a2c7470712c14fa9bc1804ae2431107b). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1838.380043] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46381bf8-6efb-4e30-bcb3-ca83cdf343cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.382854] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.383091] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.383300] env[62619]: DEBUG nova.objects.instance [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lazy-loading 'resources' on Instance uuid d3aa352b-7d2d-416e-a579-9636619bb025 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1838.394628] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Created folder: Project (a2c7470712c14fa9bc1804ae2431107b) in parent group-v368875. [ 1838.394839] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Creating folder: Instances. Parent ref: group-v369162. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1838.395103] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-326f0d12-3a3e-4098-9e39-4728cde95843 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.404681] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Created folder: Instances in parent group-v369162. [ 1838.404946] env[62619]: DEBUG oslo.service.loopingcall [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1838.405171] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1838.405388] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4fa954cf-e6a8-4147-9e84-b70cec3c3fbc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.427705] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1838.427705] env[62619]: value = "task-1778435" [ 1838.427705] env[62619]: _type = "Task" [ 1838.427705] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.436714] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778435, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.475874] env[62619]: DEBUG oslo_vmware.api [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778430, 'name': ReconfigVM_Task, 'duration_secs': 0.13074} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.476255] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369135', 'volume_id': '35d065ad-5735-4fa5-b238-762eaceed4e2', 'name': 'volume-35d065ad-5735-4fa5-b238-762eaceed4e2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '917960ca-3870-4e4e-aafe-3c6d77cf7c51', 'attached_at': '2024-12-11T22:56:52.000000', 'detached_at': '', 'volume_id': '35d065ad-5735-4fa5-b238-762eaceed4e2', 'serial': '35d065ad-5735-4fa5-b238-762eaceed4e2'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1838.476520] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1838.477317] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfb81ee-1cb2-434e-930d-e65ce50d4f8f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.484046] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1838.484336] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-45c5c9a6-6e34-4b49-ba4f-1377b156af46 {{(pid=62619) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.542908] env[62619]: DEBUG nova.objects.instance [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lazy-loading 'flavor' on Instance uuid 5cf7ca57-351f-48ab-8758-b30f50cd607f {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1838.568444] env[62619]: DEBUG oslo_vmware.api [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778431, 'name': ReconfigVM_Task, 'duration_secs': 0.196182} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.570475] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369157', 'volume_id': '292d6452-98d6-460a-b9d5-6f63c5392ab1', 'name': 'volume-292d6452-98d6-460a-b9d5-6f63c5392ab1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '52b29fef-eab6-4541-a570-af9c0c021a75', 'attached_at': '', 'detached_at': '', 'volume_id': '292d6452-98d6-460a-b9d5-6f63c5392ab1', 'serial': '292d6452-98d6-460a-b9d5-6f63c5392ab1'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1838.575031] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1838.575031] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1838.575031] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Deleting the datastore file [datastore1] 917960ca-3870-4e4e-aafe-3c6d77cf7c51 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1838.575031] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e87d3a0c-ee19-4891-8d52-5c07f5249994 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.580576] env[62619]: DEBUG oslo_vmware.api [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1838.580576] env[62619]: value = "task-1778437" [ 1838.580576] env[62619]: _type = "Task" [ 1838.580576] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.590106] env[62619]: DEBUG oslo_vmware.api [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778437, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.683024] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b905a42-1005-4bbe-9244-42176cbc064e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "54da64a0-4acf-4025-9b51-7af61dbd55fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.681s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.715916] env[62619]: DEBUG nova.network.neutron [req-78abe7b9-5059-470b-9266-c6cd23f3b801 req-422f21f5-67ed-4de4-be58-c92a87de2f25 service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updated VIF entry in instance network info cache for port b1a6212d-63f4-4343-9100-d88707a89c10. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1838.716296] env[62619]: DEBUG nova.network.neutron [req-78abe7b9-5059-470b-9266-c6cd23f3b801 req-422f21f5-67ed-4de4-be58-c92a87de2f25 service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance_info_cache with network_info: [{"id": "b1a6212d-63f4-4343-9100-d88707a89c10", "address": "fa:16:3e:48:b2:0f", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a6212d-63", "ovs_interfaceid": "b1a6212d-63f4-4343-9100-d88707a89c10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1838.746875] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ef4703d9-7efc-4f28-b83b-c41c1bac2a1b tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "1d40d434-fa8f-463e-908a-24c61538fe33" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.070s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.847155] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778432, 'name': CopyVirtualDisk_Task} 
progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.941092] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778435, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.091159] env[62619]: DEBUG oslo_vmware.api [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778437, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.103814] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef89473a-396c-44d8-947d-6875a0fdb927 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.115051] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc2c13d-f843-47d6-8835-4ff58de108ad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.164998] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8b8d3d-9fb1-4f26-a5ba-8a5387555617 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.174702] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd117eaa-245f-4a23-86fd-97f40f6523c0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.192040] env[62619]: DEBUG nova.compute.provider_tree [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1839.219407] env[62619]: DEBUG oslo_concurrency.lockutils [req-78abe7b9-5059-470b-9266-c6cd23f3b801 req-422f21f5-67ed-4de4-be58-c92a87de2f25 service nova] Releasing lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.287276] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "1d40d434-fa8f-463e-908a-24c61538fe33" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.287593] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "1d40d434-fa8f-463e-908a-24c61538fe33" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.287953] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 
tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "1d40d434-fa8f-463e-908a-24c61538fe33-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.288110] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "1d40d434-fa8f-463e-908a-24c61538fe33-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.288303] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "1d40d434-fa8f-463e-908a-24c61538fe33-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.291161] env[62619]: INFO nova.compute.manager [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Terminating instance [ 1839.307740] env[62619]: DEBUG oslo_concurrency.lockutils [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "e32cb991-a018-4b55-8cdf-378e212c8434" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.308105] env[62619]: DEBUG oslo_concurrency.lockutils [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "e32cb991-a018-4b55-8cdf-378e212c8434" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.308328] env[62619]: DEBUG oslo_concurrency.lockutils [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "e32cb991-a018-4b55-8cdf-378e212c8434-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.312026] env[62619]: DEBUG oslo_concurrency.lockutils [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "e32cb991-a018-4b55-8cdf-378e212c8434-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.312026] env[62619]: DEBUG oslo_concurrency.lockutils [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock 
"e32cb991-a018-4b55-8cdf-378e212c8434-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.312026] env[62619]: INFO nova.compute.manager [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Terminating instance [ 1839.346608] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778432, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.438725] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778435, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.446835] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Updating instance_info_cache with network_info: [{"id": "3aadd3d9-5b8f-4d90-94a3-d818bbac3830", "address": "fa:16:3e:7e:6a:12", "network": {"id": "07757a8f-0918-4007-8160-0d88375143a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1572779111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e11e1bca0c747fd8b4a0ca3e220ba4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3aadd3d9-5b", "ovs_interfaceid": "3aadd3d9-5b8f-4d90-94a3-d818bbac3830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1839.560057] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fab39b60-177b-4073-a6ea-39dcf9b0363d tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.279s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.591093] env[62619]: DEBUG oslo_vmware.api [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778437, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.870123} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.591347] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1839.591519] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1839.591692] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1839.591895] env[62619]: INFO nova.compute.manager [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Took 2.82 seconds to destroy the instance on the hypervisor. [ 1839.592151] env[62619]: DEBUG oslo.service.loopingcall [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1839.592341] env[62619]: DEBUG nova.compute.manager [-] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1839.592434] env[62619]: DEBUG nova.network.neutron [-] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1839.621891] env[62619]: DEBUG nova.objects.instance [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lazy-loading 'flavor' on Instance uuid 52b29fef-eab6-4541-a570-af9c0c021a75 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1839.697092] env[62619]: DEBUG nova.scheduler.client.report [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1839.795168] env[62619]: DEBUG nova.compute.manager [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1839.795412] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1839.796384] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06139985-993a-4bf7-8ed1-44c43035fe12 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.804985] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1839.805243] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-81d0f57d-c6ae-471c-8fff-81a30733e253 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.814485] env[62619]: DEBUG nova.compute.manager [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1839.814680] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1839.815437] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660fc7aa-f81a-43e4-9fa7-43ea8623debd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.822976] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1839.823207] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cada9c0d-8478-4675-9e2f-222e3e5b1a53 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.829034] env[62619]: DEBUG oslo_vmware.api [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1839.829034] env[62619]: value = "task-1778439" [ 1839.829034] env[62619]: _type = "Task" [ 1839.829034] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.836553] env[62619]: DEBUG oslo_vmware.api [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778439, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.846565] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778432, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.131132} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.846945] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e1cd6059-ddb0-4f10-a569-e0bc71a63f4b/e1cd6059-ddb0-4f10-a569-e0bc71a63f4b.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1839.847301] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1839.847643] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e381b79-b2a4-4432-bce0-f9f70db7d721 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.854523] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Waiting for the task: (returnval){ [ 1839.854523] env[62619]: value = "task-1778440" [ 1839.854523] env[62619]: _type = "Task" [ 1839.854523] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.861817] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778440, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.901156] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1839.901397] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1839.901584] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleting the datastore file [datastore1] 1d40d434-fa8f-463e-908a-24c61538fe33 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1839.901849] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87b1d0cc-2df5-4ffd-8a5a-b72fa9063b19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.908501] env[62619]: DEBUG oslo_vmware.api [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1839.908501] env[62619]: value = "task-1778441" [ 1839.908501] env[62619]: _type = "Task" [ 1839.908501] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.917346] env[62619]: DEBUG oslo_vmware.api [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778441, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.939374] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778435, 'name': CreateVM_Task, 'duration_secs': 1.022264} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.939731] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1839.940297] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1839.940459] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.940794] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1839.941094] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0effdabd-6c70-4e2a-8e10-55d8b9b9f2c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.946720] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1839.946720] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529809bb-6d12-0c08-375d-4d2b0b3a1858" [ 1839.946720] env[62619]: _type = "Task" [ 1839.946720] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.952593] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Releasing lock "refresh_cache-e32cb991-a018-4b55-8cdf-378e212c8434" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.952593] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 1839.952593] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.952593] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.952593] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.958145] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529809bb-6d12-0c08-375d-4d2b0b3a1858, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.045467] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.045750] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.045971] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "5cf7ca57-351f-48ab-8758-b30f50cd607f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.046171] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.046343] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.048528] env[62619]: INFO nova.compute.manager [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Terminating instance [ 1840.120464] env[62619]: DEBUG nova.compute.manager [req-accc0ce2-e289-4633-afd4-251184d99a26 req-72364367-3c9f-4439-a414-c89fa29978d6 service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Received event network-vif-deleted-5911524f-a8b5-4591-a312-ea0cefac24df {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1840.120669] env[62619]: INFO nova.compute.manager [req-accc0ce2-e289-4633-afd4-251184d99a26 req-72364367-3c9f-4439-a414-c89fa29978d6 service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Neutron deleted interface 5911524f-a8b5-4591-a312-ea0cefac24df; detaching it from the instance and deleting it from the info cache [ 1840.120844] env[62619]: DEBUG nova.network.neutron [req-accc0ce2-e289-4633-afd4-251184d99a26 req-72364367-3c9f-4439-a414-c89fa29978d6 service 
nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1840.125948] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4966bc06-bb16-4f49-8e52-b03e875429f0 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "52b29fef-eab6-4541-a570-af9c0c021a75" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.885s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.202443] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.819s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.227054] env[62619]: INFO nova.scheduler.client.report [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Deleted allocations for instance d3aa352b-7d2d-416e-a579-9636619bb025 [ 1840.339655] env[62619]: DEBUG oslo_vmware.api [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778439, 'name': PowerOffVM_Task, 'duration_secs': 0.202579} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.340427] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1840.340427] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1840.340427] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fce8ef29-7faa-420a-9c3e-bb9d53dc94f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.365992] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778440, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063201} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.366442] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1840.367331] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0eabe7e-c65f-434d-bafe-9c2ca14c624d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.390514] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] e1cd6059-ddb0-4f10-a569-e0bc71a63f4b/e1cd6059-ddb0-4f10-a569-e0bc71a63f4b.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1840.395017] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdbe86d0-cbaa-4666-a573-b11769a6b707 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.414345] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Waiting for the task: (returnval){ [ 1840.414345] env[62619]: value = "task-1778444" [ 1840.414345] env[62619]: _type = "Task" [ 1840.414345] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.420757] env[62619]: DEBUG oslo_vmware.api [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778441, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149802} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.421381] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1840.421582] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1840.421764] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1840.422565] env[62619]: INFO nova.compute.manager [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1840.422848] env[62619]: DEBUG oslo.service.loopingcall [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1840.423074] env[62619]: DEBUG nova.compute.manager [-] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1840.423175] env[62619]: DEBUG nova.network.neutron [-] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1840.428962] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778444, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.435579] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1840.435750] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1840.435930] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleting the datastore file [datastore1] e32cb991-a018-4b55-8cdf-378e212c8434 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1840.436202] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c356649e-33fc-407f-a63a-8ed87caa51a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.444901] env[62619]: DEBUG oslo_vmware.api [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for the task: (returnval){ [ 1840.444901] env[62619]: value = "task-1778445" [ 1840.444901] env[62619]: _type = "Task" [ 1840.444901] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.458392] env[62619]: DEBUG oslo_vmware.api [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778445, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.463275] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529809bb-6d12-0c08-375d-4d2b0b3a1858, 'name': SearchDatastore_Task, 'duration_secs': 0.01048} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.468963] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.468963] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1840.468963] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1840.468963] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1840.468963] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1840.468963] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3e1268c-7e7a-4c6f-9c25-1efd0e659ce1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.477704] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1840.477974] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1840.479086] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b909d463-e3d3-4a0a-8be1-a8591c579b6d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.485477] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1840.485477] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52472316-1c9b-05d7-7955-5fb1609b512c" [ 1840.485477] env[62619]: _type = "Task" [ 1840.485477] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.495588] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52472316-1c9b-05d7-7955-5fb1609b512c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.552914] env[62619]: DEBUG nova.compute.manager [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1840.553249] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1840.554311] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b0153cb-7c3b-432a-8aee-693c695c495c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.563755] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1840.564095] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59bd2e0b-535b-4c2f-b9a2-a564197f95a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.570254] env[62619]: DEBUG oslo_vmware.api [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1840.570254] env[62619]: value = "task-1778446" [ 1840.570254] env[62619]: _type = "Task" [ 1840.570254] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.579987] env[62619]: DEBUG oslo_vmware.api [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778446, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.592257] env[62619]: DEBUG nova.network.neutron [-] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1840.623410] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9c7ef01-2b38-473c-9761-a1e9c94d3c19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.634907] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6d4058-61d6-4c0c-a427-611d3b132854 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.683839] env[62619]: DEBUG nova.compute.manager [req-accc0ce2-e289-4633-afd4-251184d99a26 req-72364367-3c9f-4439-a414-c89fa29978d6 service nova] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Detach interface failed, port_id=5911524f-a8b5-4591-a312-ea0cefac24df, reason: Instance 917960ca-3870-4e4e-aafe-3c6d77cf7c51 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1840.739813] env[62619]: INFO nova.compute.manager [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Rescuing [ 1840.739813] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1840.739997] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1840.739997] env[62619]: DEBUG nova.network.neutron [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1840.741236] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8648538e-a7f5-47c0-bb7a-620cd84c933a tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "d3aa352b-7d2d-416e-a579-9636619bb025" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.062s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.926331] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778444, 'name': ReconfigVM_Task, 'duration_secs': 0.279467} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.926653] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Reconfigured VM instance instance-00000068 to attach disk [datastore1] e1cd6059-ddb0-4f10-a569-e0bc71a63f4b/e1cd6059-ddb0-4f10-a569-e0bc71a63f4b.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1840.927315] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e18753cc-094a-4028-9885-606780df7cac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.934591] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Waiting for the task: (returnval){ [ 1840.934591] env[62619]: value = "task-1778447" [ 1840.934591] env[62619]: _type = "Task" [ 1840.934591] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.943763] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778447, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.954012] env[62619]: DEBUG oslo_vmware.api [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Task: {'id': task-1778445, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144493} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.954331] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1840.954558] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1840.954721] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1840.954892] env[62619]: INFO nova.compute.manager [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1840.955156] env[62619]: DEBUG oslo.service.loopingcall [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1840.955350] env[62619]: DEBUG nova.compute.manager [-] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1840.955448] env[62619]: DEBUG nova.network.neutron [-] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1840.980244] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "a250f05d-cd74-436d-b656-2a9e55527809" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.980524] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "a250f05d-cd74-436d-b656-2a9e55527809" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.980740] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "a250f05d-cd74-436d-b656-2a9e55527809-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.980930] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "a250f05d-cd74-436d-b656-2a9e55527809-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.981134] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "a250f05d-cd74-436d-b656-2a9e55527809-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.984034] env[62619]: INFO nova.compute.manager [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Terminating instance [ 1840.998599] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52472316-1c9b-05d7-7955-5fb1609b512c, 'name': SearchDatastore_Task, 'duration_secs': 0.01044} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.002477] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4cafeaa-c575-4416-bc89-67c0d4764c5a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.008606] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1841.008606] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52532498-57fe-525a-8dbd-b0c753c80531" [ 1841.008606] env[62619]: _type = "Task" [ 1841.008606] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.016681] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52532498-57fe-525a-8dbd-b0c753c80531, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.080265] env[62619]: DEBUG oslo_vmware.api [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778446, 'name': PowerOffVM_Task, 'duration_secs': 0.430578} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.080536] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1841.080708] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1841.080992] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f4a75367-e63d-473e-9e1b-199cb4526c8e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.095217] env[62619]: INFO nova.compute.manager [-] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Took 1.50 seconds to deallocate network for instance. [ 1841.192807] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1841.193043] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1841.193236] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleting the datastore file [datastore1] 5cf7ca57-351f-48ab-8758-b30f50cd607f {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1841.193503] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a5a92f2c-4bcc-49a7-b015-e3945cac6c7f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.202585] env[62619]: DEBUG oslo_vmware.api [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1841.202585] env[62619]: value = "task-1778449" [ 1841.202585] env[62619]: _type = "Task" [ 1841.202585] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.211518] env[62619]: DEBUG oslo_vmware.api [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778449, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.294474] env[62619]: DEBUG nova.network.neutron [-] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.447605] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778447, 'name': Rename_Task, 'duration_secs': 0.277667} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.447605] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1841.447605] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72f9bdd2-21ff-4493-ac30-9867881d42b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.455026] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Waiting for the task: (returnval){ [ 1841.455026] env[62619]: value = "task-1778450" [ 1841.455026] env[62619]: _type = "Task" [ 1841.455026] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.466768] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778450, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.497159] env[62619]: DEBUG nova.compute.manager [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1841.497159] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1841.497159] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a6c713-065d-43cf-be59-0e7c92469bba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.503022] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1841.503505] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68f9aea2-3531-4407-b7bd-1bd53ba763d0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.510765] env[62619]: DEBUG oslo_vmware.api [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1841.510765] env[62619]: value = "task-1778451" [ 1841.510765] env[62619]: _type = "Task" [ 1841.510765] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.524172] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52532498-57fe-525a-8dbd-b0c753c80531, 'name': SearchDatastore_Task, 'duration_secs': 0.019041} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.527943] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1841.528419] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4c66bbdf-af6a-4705-8219-85cf19f8314e/4c66bbdf-af6a-4705-8219-85cf19f8314e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1841.528839] env[62619]: DEBUG oslo_vmware.api [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778451, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.529930] env[62619]: DEBUG nova.network.neutron [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Updating instance_info_cache with network_info: [{"id": "165f6c4a-b24e-4c32-845f-891bf7478563", "address": "fa:16:3e:ec:67:49", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165f6c4a-b2", "ovs_interfaceid": "165f6c4a-b24e-4c32-845f-891bf7478563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.531627] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d6468da6-5034-43fd-bf51-2e57e961709e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.539807] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1841.539807] env[62619]: value = "task-1778452" [ 1841.539807] env[62619]: _type = "Task" [ 1841.539807] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.549528] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778452, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.643642] env[62619]: INFO nova.compute.manager [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Took 0.55 seconds to detach 1 volumes for instance. 
[ 1841.688299] env[62619]: DEBUG nova.network.neutron [-] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.713489] env[62619]: DEBUG oslo_vmware.api [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778449, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134487} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.713762] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1841.713953] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1841.714148] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1841.714321] env[62619]: INFO nova.compute.manager [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1841.714619] env[62619]: DEBUG oslo.service.loopingcall [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1841.714825] env[62619]: DEBUG nova.compute.manager [-] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1841.714923] env[62619]: DEBUG nova.network.neutron [-] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1841.797533] env[62619]: INFO nova.compute.manager [-] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Took 1.37 seconds to deallocate network for instance. [ 1841.965851] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778450, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.024480] env[62619]: DEBUG oslo_vmware.api [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778451, 'name': PowerOffVM_Task, 'duration_secs': 0.170548} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.024760] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1842.024935] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1842.025217] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0ddfe9b0-8fd8-422d-ad7e-3aa7fed5bb89 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.039602] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1842.053247] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778452, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.449635} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.053504] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 4c66bbdf-af6a-4705-8219-85cf19f8314e/4c66bbdf-af6a-4705-8219-85cf19f8314e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1842.053714] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1842.054084] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b4920a6a-9e7f-446e-aa8a-aa715ebd5fd5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.069590] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1842.069590] env[62619]: value = "task-1778455" [ 1842.069590] env[62619]: _type = "Task" [ 1842.069590] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.082231] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778455, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.124189] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1842.124337] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1842.124518] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Deleting the datastore file [datastore1] a250f05d-cd74-436d-b656-2a9e55527809 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1842.124791] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e6605ce-fb62-42ff-99de-b5c6d7230389 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.134209] env[62619]: DEBUG oslo_vmware.api [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1842.134209] env[62619]: value = "task-1778456" [ 1842.134209] env[62619]: _type = "Task" [ 1842.134209] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.139960] env[62619]: DEBUG oslo_vmware.api [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778456, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.149685] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.150016] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.150297] env[62619]: DEBUG nova.objects.instance [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'resources' on Instance uuid 917960ca-3870-4e4e-aafe-3c6d77cf7c51 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1842.161698] env[62619]: DEBUG nova.compute.manager [req-4e24e495-6239-4405-9c49-51230a722ab6 req-951b51b6-9e74-4cd1-b2f0-03fea434f071 service nova] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Received event network-vif-deleted-4db4c00e-c5a4-4990-ab07-206b511515d8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1842.161975] env[62619]: DEBUG nova.compute.manager [req-4e24e495-6239-4405-9c49-51230a722ab6 req-951b51b6-9e74-4cd1-b2f0-03fea434f071 service nova] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Received event network-vif-deleted-3aadd3d9-5b8f-4d90-94a3-d818bbac3830 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1842.191940] env[62619]: INFO nova.compute.manager [-] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Took 1.24 seconds to deallocate network for instance. [ 1842.304504] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.463385] env[62619]: DEBUG oslo_vmware.api [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778450, 'name': PowerOnVM_Task, 'duration_secs': 0.676878} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.463726] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1842.463934] env[62619]: INFO nova.compute.manager [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Took 9.09 seconds to spawn the instance on the hypervisor. [ 1842.464199] env[62619]: DEBUG nova.compute.manager [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1842.464986] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ffec061-73b3-4546-85c8-3877765aeace {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.584229] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778455, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069448} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.586299] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1842.586299] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ae1d5f-07ad-4e7c-9083-c35539865ec3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.609530] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 4c66bbdf-af6a-4705-8219-85cf19f8314e/4c66bbdf-af6a-4705-8219-85cf19f8314e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1842.610293] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-417a2930-80fe-4281-8b3d-a5f638de40bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.636815] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1842.636815] env[62619]: value = "task-1778457" [ 1842.636815] 
env[62619]: _type = "Task" [ 1842.636815] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.643908] env[62619]: DEBUG oslo_vmware.api [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778456, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.323316} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.643908] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1842.644214] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1842.644451] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1842.644656] env[62619]: INFO nova.compute.manager [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1842.644952] env[62619]: DEBUG oslo.service.loopingcall [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1842.648221] env[62619]: DEBUG nova.compute.manager [-] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1842.648808] env[62619]: DEBUG nova.network.neutron [-] [instance: a250f05d-cd74-436d-b656-2a9e55527809] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1842.650179] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778457, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.698537] env[62619]: DEBUG oslo_concurrency.lockutils [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.833716] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d46e65-6529-40b5-8ec5-02eb1961c720 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.842412] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f32f0c-172d-43ee-8321-fa3b07da6090 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.873533] env[62619]: DEBUG nova.network.neutron [-] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1842.878579] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe847072-6286-4da3-a2a4-e1a2d7678e5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.887736] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f40d98-4afd-4162-a8b0-275132b14b58 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.903206] env[62619]: DEBUG nova.compute.provider_tree [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1842.982305] env[62619]: INFO nova.compute.manager [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Took 23.73 seconds to build instance. [ 1843.147017] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778457, 'name': ReconfigVM_Task, 'duration_secs': 0.277471} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.147320] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 4c66bbdf-af6a-4705-8219-85cf19f8314e/4c66bbdf-af6a-4705-8219-85cf19f8314e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1843.147982] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45d69f88-7902-4ef7-89a4-7c6635005928 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.154105] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1843.154105] env[62619]: value = "task-1778458" [ 1843.154105] env[62619]: _type = "Task" [ 1843.154105] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.166338] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778458, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.377030] env[62619]: INFO nova.compute.manager [-] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Took 1.66 seconds to deallocate network for instance. 
[ 1843.382489] env[62619]: DEBUG nova.network.neutron [-] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.406228] env[62619]: DEBUG nova.scheduler.client.report [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1843.484468] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4aa4b4a0-e4b9-4636-8179-c32337f9966a tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Lock "e1cd6059-ddb0-4f10-a569-e0bc71a63f4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 25.243s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.584510] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1843.584890] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fdd2bec3-e37c-4d48-b955-9e2926e59640 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.592443] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1843.592443] env[62619]: value = "task-1778459" [ 1843.592443] env[62619]: _type = "Task" [ 1843.592443] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.602185] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778459, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.664148] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778458, 'name': Rename_Task, 'duration_secs': 0.154117} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.665831] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1843.665831] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea1329a3-b2fe-4422-b18a-1ef113b2c1d7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.672072] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1843.672072] env[62619]: value = "task-1778460" [ 1843.672072] env[62619]: _type = "Task" [ 1843.672072] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.681204] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778460, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.884760] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.885245] env[62619]: INFO nova.compute.manager [-] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Took 1.24 seconds to deallocate network for instance. 
[ 1843.912059] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.762s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.914356] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.610s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.914569] env[62619]: DEBUG nova.objects.instance [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lazy-loading 'resources' on Instance uuid 1d40d434-fa8f-463e-908a-24c61538fe33 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1843.935699] env[62619]: INFO nova.scheduler.client.report [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Deleted allocations for instance 917960ca-3870-4e4e-aafe-3c6d77cf7c51 [ 1844.103122] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778459, 'name': PowerOffVM_Task, 'duration_secs': 0.301294} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.103974] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1844.104434] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a028caa-0bbe-4b22-9610-21b13631fd05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.126606] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427abb24-09c7-4598-9866-d91a730a01e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.165296] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1844.165656] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6fdfa518-c4d5-4ec4-9036-1d075abe8a22 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.172376] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Acquiring lock "e1cd6059-ddb0-4f10-a569-e0bc71a63f4b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.172804] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Lock "e1cd6059-ddb0-4f10-a569-e0bc71a63f4b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.173066] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Acquiring lock "e1cd6059-ddb0-4f10-a569-e0bc71a63f4b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.173305] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Lock "e1cd6059-ddb0-4f10-a569-e0bc71a63f4b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.173506] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Lock "e1cd6059-ddb0-4f10-a569-e0bc71a63f4b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.180921] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1844.180921] env[62619]: value = "task-1778462" [ 1844.180921] env[62619]: _type = "Task" [ 1844.180921] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.181505] env[62619]: INFO nova.compute.manager [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Terminating instance [ 1844.192170] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778460, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.195248] env[62619]: DEBUG nova.compute.manager [req-5ef1cdad-97d0-4c0d-977a-1553bad3d687 req-dc0843e9-05a6-4b0e-8752-8c34f8d866af service nova] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Received event network-vif-deleted-89e91bb9-2bd5-4385-b3dd-cee4612bb166 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1844.195487] env[62619]: DEBUG nova.compute.manager [req-5ef1cdad-97d0-4c0d-977a-1553bad3d687 req-dc0843e9-05a6-4b0e-8752-8c34f8d866af service nova] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Received event network-vif-deleted-e127117d-d3f7-4878-bd28-c36eddba80b8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1844.199650] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1844.199832] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1844.200108] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" 
{{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.200243] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1844.200418] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1844.201205] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81c24a86-4e2c-4768-be7a-a982ad2c0270 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.212866] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1844.213067] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1844.213791] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3928e2d3-771f-46b6-a5f9-239e515b3c8e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.221282] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1844.221282] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523ee2ea-b89a-a252-ff73-c71c93d28795" [ 1844.221282] env[62619]: _type = "Task" [ 1844.221282] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.228724] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523ee2ea-b89a-a252-ff73-c71c93d28795, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.391811] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.444962] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eecc0e65-f03b-4393-a273-b00678dda28e tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "917960ca-3870-4e4e-aafe-3c6d77cf7c51" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 8.182s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.607986] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8a91da-6018-47e7-bc37-b16ecf242ae0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.617569] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155c14d2-72af-4513-bf02-82f368a49adc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.652224] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36122002-2b48-4c03-a8db-1d50ef3dff7b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.660483] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71803133-62ac-432d-8892-49364110a509 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.675418] env[62619]: DEBUG nova.compute.provider_tree [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1844.685824] env[62619]: DEBUG oslo_vmware.api [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778460, 'name': PowerOnVM_Task, 'duration_secs': 0.577802} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.686756] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1844.686970] env[62619]: INFO nova.compute.manager [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Took 9.40 seconds to spawn the instance on the hypervisor. 
[ 1844.687161] env[62619]: DEBUG nova.compute.manager [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1844.687965] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0dc32f3-8d70-4698-8f8c-817fd57c6758 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.694983] env[62619]: DEBUG nova.compute.manager [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1844.695200] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1844.698705] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13913863-5f83-4fcf-bfb1-016302486eb9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.706050] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1844.706288] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22e2b2a0-4e43-42e7-8b9d-0e6b5322e17a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.712362] env[62619]: DEBUG oslo_vmware.api [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Waiting for the task: (returnval){ [ 1844.712362] env[62619]: value = "task-1778463" [ 1844.712362] env[62619]: _type = "Task" [ 1844.712362] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.720842] env[62619]: DEBUG oslo_vmware.api [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778463, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.730803] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]523ee2ea-b89a-a252-ff73-c71c93d28795, 'name': SearchDatastore_Task, 'duration_secs': 0.014217} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.732180] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14f12498-88d5-4572-a2bd-b8e679c274e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.738103] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1844.738103] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52984d20-79b1-47df-3f10-f82fef53e3b1" [ 1844.738103] env[62619]: _type = "Task" [ 1844.738103] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.746355] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52984d20-79b1-47df-3f10-f82fef53e3b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.181726] env[62619]: DEBUG nova.scheduler.client.report [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1845.209484] env[62619]: INFO nova.compute.manager [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Took 23.54 seconds to build instance. [ 1845.226080] env[62619]: DEBUG oslo_vmware.api [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778463, 'name': PowerOffVM_Task, 'duration_secs': 0.192599} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.226903] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1845.227099] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1845.227350] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8b2c69e-8de3-4b2e-8230-4d3877334d47 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.248285] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52984d20-79b1-47df-3f10-f82fef53e3b1, 'name': SearchDatastore_Task, 'duration_secs': 0.009666} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.248534] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.248800] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 52b29fef-eab6-4541-a570-af9c0c021a75/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk. {{(pid=62619) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1845.249084] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36fbeab0-7689-4dbe-aea7-4ccab0f9d36a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.255954] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1845.255954] env[62619]: value = "task-1778465" [ 1845.255954] env[62619]: _type = "Task" [ 1845.255954] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.263678] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778465, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.306633] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1845.306868] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1845.307064] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Deleting the datastore file [datastore1] e1cd6059-ddb0-4f10-a569-e0bc71a63f4b {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1845.307336] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3aedbf69-056c-4697-8d02-79f11136bb81 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.314540] env[62619]: DEBUG oslo_vmware.api [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Waiting for the task: (returnval){ [ 1845.314540] env[62619]: value = "task-1778466" [ 1845.314540] env[62619]: _type = "Task" [ 1845.314540] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.322545] env[62619]: DEBUG oslo_vmware.api [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778466, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.685711] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.771s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.688306] env[62619]: DEBUG oslo_concurrency.lockutils [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.990s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.689782] env[62619]: DEBUG nova.objects.instance [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lazy-loading 'resources' on Instance uuid e32cb991-a018-4b55-8cdf-378e212c8434 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1845.713193] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fb6a31d4-391e-4f5d-be63-5eeef7ec0142 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.053s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.713193] env[62619]: INFO nova.scheduler.client.report [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleted allocations for instance 1d40d434-fa8f-463e-908a-24c61538fe33 [ 1845.771089] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778465, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.827202] env[62619]: DEBUG oslo_vmware.api [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Task: {'id': task-1778466, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.473481} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.828118] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1845.828118] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1845.828118] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1845.828118] env[62619]: INFO nova.compute.manager [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1845.828555] env[62619]: DEBUG oslo.service.loopingcall [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1845.828888] env[62619]: DEBUG nova.compute.manager [-] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1845.829020] env[62619]: DEBUG nova.network.neutron [-] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1846.018317] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "a0953370-77f2-4e3b-a92e-cb12b3a82361" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.018579] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.106458] env[62619]: DEBUG nova.compute.manager [req-ea169ff8-40f4-4b46-8535-7c3407c12fbc req-2004d1cc-5f3a-4354-aee4-da6fb9ab99fe service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Received event network-changed-b1a6212d-63f4-4343-9100-d88707a89c10 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1846.106596] env[62619]: DEBUG nova.compute.manager [req-ea169ff8-40f4-4b46-8535-7c3407c12fbc req-2004d1cc-5f3a-4354-aee4-da6fb9ab99fe service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Refreshing instance network info cache due to event network-changed-b1a6212d-63f4-4343-9100-d88707a89c10. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1846.106809] env[62619]: DEBUG oslo_concurrency.lockutils [req-ea169ff8-40f4-4b46-8535-7c3407c12fbc req-2004d1cc-5f3a-4354-aee4-da6fb9ab99fe service nova] Acquiring lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.106943] env[62619]: DEBUG oslo_concurrency.lockutils [req-ea169ff8-40f4-4b46-8535-7c3407c12fbc req-2004d1cc-5f3a-4354-aee4-da6fb9ab99fe service nova] Acquired lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.107145] env[62619]: DEBUG nova.network.neutron [req-ea169ff8-40f4-4b46-8535-7c3407c12fbc req-2004d1cc-5f3a-4354-aee4-da6fb9ab99fe service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Refreshing network info cache for port b1a6212d-63f4-4343-9100-d88707a89c10 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1846.221752] env[62619]: DEBUG oslo_concurrency.lockutils [None req-39c6f7dc-4521-4f46-8903-764aaadfe3f5 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "1d40d434-fa8f-463e-908a-24c61538fe33" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.934s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.238135] env[62619]: DEBUG nova.compute.manager [req-5b5aefe3-0a71-4cda-8e70-ff087f0731db req-c16e1a0d-74ec-4c22-8cef-e4d61d63aee5 service nova] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Received event network-vif-deleted-ffd85085-7cdb-4167-99cf-28e1267cf58b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1846.238135] env[62619]: INFO nova.compute.manager [req-5b5aefe3-0a71-4cda-8e70-ff087f0731db req-c16e1a0d-74ec-4c22-8cef-e4d61d63aee5 service nova] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Neutron deleted interface ffd85085-7cdb-4167-99cf-28e1267cf58b; detaching it from the instance and deleting it from the info cache [ 1846.238135] env[62619]: DEBUG nova.network.neutron [req-5b5aefe3-0a71-4cda-8e70-ff087f0731db req-c16e1a0d-74ec-4c22-8cef-e4d61d63aee5 service nova] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.269857] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778465, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.770633} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.273128] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 52b29fef-eab6-4541-a570-af9c0c021a75/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk. 
[ 1846.273128] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de7388b-c92f-4ff5-b75c-6018d43cc54e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.305575] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 52b29fef-eab6-4541-a570-af9c0c021a75/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1846.308140] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5307677-4d61-495f-8f0b-3f0e98e1bf9d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.327273] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1846.327273] env[62619]: value = "task-1778468" [ 1846.327273] env[62619]: _type = "Task" [ 1846.327273] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.339503] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778468, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.424646] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201f92be-6bfb-4901-b990-ede439336d29 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.432548] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9492960b-91f0-4c3b-83bb-3b3fd5a2bcb3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.463174] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d93c67f-5357-473c-bf5e-c7321ff38a93 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.470672] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8988ceb4-bb76-4b94-adb9-780cb627c280 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.485186] env[62619]: DEBUG nova.compute.provider_tree [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1846.524482] env[62619]: DEBUG nova.compute.manager [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1846.693783] env[62619]: DEBUG nova.network.neutron [-] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.741370] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-48adfae4-7f97-4001-ad2f-dc60f4cdd4ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.751244] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee1deb5-2f0c-451f-ad13-0a8e20c0655e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.783975] env[62619]: DEBUG nova.compute.manager [req-5b5aefe3-0a71-4cda-8e70-ff087f0731db req-c16e1a0d-74ec-4c22-8cef-e4d61d63aee5 service nova] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Detach interface failed, port_id=ffd85085-7cdb-4167-99cf-28e1267cf58b, reason: Instance e1cd6059-ddb0-4f10-a569-e0bc71a63f4b could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1846.839179] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778468, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.902635] env[62619]: DEBUG nova.network.neutron [req-ea169ff8-40f4-4b46-8535-7c3407c12fbc req-2004d1cc-5f3a-4354-aee4-da6fb9ab99fe service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updated VIF entry in instance network info cache for port b1a6212d-63f4-4343-9100-d88707a89c10. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1846.903025] env[62619]: DEBUG nova.network.neutron [req-ea169ff8-40f4-4b46-8535-7c3407c12fbc req-2004d1cc-5f3a-4354-aee4-da6fb9ab99fe service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance_info_cache with network_info: [{"id": "b1a6212d-63f4-4343-9100-d88707a89c10", "address": "fa:16:3e:48:b2:0f", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a6212d-63", "ovs_interfaceid": "b1a6212d-63f4-4343-9100-d88707a89c10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.988621] env[62619]: DEBUG nova.scheduler.client.report [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1847.047213] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.197122] env[62619]: INFO nova.compute.manager [-] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Took 1.37 seconds to deallocate network for instance. 
[ 1847.338616] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778468, 'name': ReconfigVM_Task, 'duration_secs': 0.814627} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.338918] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 52b29fef-eab6-4541-a570-af9c0c021a75/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1847.339820] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60397a8a-d801-4036-a807-1aaceaea31ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.371464] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48e0f01c-f0a4-440a-b2e8-a054084662c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.385651] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1847.385651] env[62619]: value = "task-1778469" [ 1847.385651] env[62619]: _type = "Task" [ 1847.385651] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.393596] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778469, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.406276] env[62619]: DEBUG oslo_concurrency.lockutils [req-ea169ff8-40f4-4b46-8535-7c3407c12fbc req-2004d1cc-5f3a-4354-aee4-da6fb9ab99fe service nova] Releasing lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.495651] env[62619]: DEBUG oslo_concurrency.lockutils [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.807s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.498551] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.614s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.498819] env[62619]: DEBUG nova.objects.instance [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lazy-loading 'resources' on Instance uuid 5cf7ca57-351f-48ab-8758-b30f50cd607f {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1847.512906] env[62619]: INFO nova.scheduler.client.report [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Deleted allocations for instance e32cb991-a018-4b55-8cdf-378e212c8434 [ 1847.656034] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "f4e85890-ca7d-45a7-92ff-ab881c21c7ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.656265] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "f4e85890-ca7d-45a7-92ff-ab881c21c7ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.705423] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.895752] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778469, 'name': ReconfigVM_Task, 'duration_secs': 0.349768} 
completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.896439] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1847.896546] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-981536bb-9e31-4ff6-b858-ebeb11d54a0d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.902085] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1847.902085] env[62619]: value = "task-1778470" [ 1847.902085] env[62619]: _type = "Task" [ 1847.902085] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.909458] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778470, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.021778] env[62619]: DEBUG oslo_concurrency.lockutils [None req-381d7339-2036-4476-83b8-5f470dfde99e tempest-ServersTestJSON-68127786 tempest-ServersTestJSON-68127786-project-member] Lock "e32cb991-a018-4b55-8cdf-378e212c8434" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.714s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1848.158614] env[62619]: DEBUG nova.compute.manager [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1848.180892] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-146e7a9f-3231-4e15-8596-2990f10cc70c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.192013] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06424bca-5762-44dc-8d9e-011ab1802e2d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.227998] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20ec927-9ccb-4cee-be9a-b5dd63dd4ec3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.238987] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9415bdb0-9ed5-4cb3-8c36-31a7007d881c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.258059] env[62619]: DEBUG nova.compute.provider_tree [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1848.411500] env[62619]: DEBUG oslo_vmware.api [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778470, 'name': PowerOnVM_Task, 'duration_secs': 0.390409} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.411755] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1848.414527] env[62619]: DEBUG nova.compute.manager [None req-9c970522-cbeb-484a-bed8-d367e31685d2 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1848.415331] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead3cfb8-9420-4806-aa7c-78b4864803c2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.678561] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.762714] env[62619]: DEBUG nova.scheduler.client.report [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1849.269238] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.770s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.272786] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.881s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.273612] env[62619]: DEBUG nova.objects.instance [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lazy-loading 'resources' on Instance uuid a250f05d-cd74-436d-b656-2a9e55527809 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1849.309291] env[62619]: INFO 
nova.scheduler.client.report [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleted allocations for instance 5cf7ca57-351f-48ab-8758-b30f50cd607f [ 1849.819524] env[62619]: DEBUG oslo_concurrency.lockutils [None req-db2c51e6-c0c9-497e-9c8a-52c399708873 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "5cf7ca57-351f-48ab-8758-b30f50cd607f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.774s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.923544] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcaaf5bc-e307-4d68-8d45-5e21386d713e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.930347] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9f42a6-6636-4dab-84f2-448ee2f1282a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.962106] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f699c66-907f-4c1e-aaa7-30d2bb3a0a05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.970017] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623550b7-a17c-40c5-a7b4-e50cc7634f4e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.985019] env[62619]: DEBUG nova.compute.provider_tree [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1850.131103] env[62619]: INFO nova.compute.manager [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Unrescuing [ 1850.131400] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1850.131885] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquired lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.131885] env[62619]: DEBUG nova.network.neutron [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Building network info cache for 
instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1850.489079] env[62619]: DEBUG nova.scheduler.client.report [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1850.994660] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.722s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1850.996894] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.950s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1850.999194] env[62619]: INFO nova.compute.claims [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1851.022440] env[62619]: INFO nova.scheduler.client.report [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Deleted allocations for instance a250f05d-cd74-436d-b656-2a9e55527809 [ 1851.036423] env[62619]: DEBUG nova.network.neutron [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Updating instance_info_cache with network_info: [{"id": "165f6c4a-b24e-4c32-845f-891bf7478563", "address": "fa:16:3e:ec:67:49", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": 
"nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165f6c4a-b2", "ovs_interfaceid": "165f6c4a-b24e-4c32-845f-891bf7478563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1851.531250] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a1998378-abf8-4f62-980a-df5b6d155f71 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "a250f05d-cd74-436d-b656-2a9e55527809" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.551s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.539026] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Releasing lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.539430] env[62619]: DEBUG nova.objects.instance [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lazy-loading 'flavor' on Instance uuid 52b29fef-eab6-4541-a570-af9c0c021a75 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1852.044661] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d840e16-7613-48fb-9c6c-95bf19584c8d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.071573] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1852.074276] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac81bc18-ce54-435a-9226-fff64d01b082 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.080830] env[62619]: DEBUG oslo_vmware.api [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1852.080830] env[62619]: value = "task-1778473" [ 1852.080830] env[62619]: _type = "Task" [ 1852.080830] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.091205] env[62619]: DEBUG oslo_vmware.api [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778473, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.125928] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.126239] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.190067] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4136b21-033c-41cb-a44c-e8839f931ea7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.198179] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7307f3-1bb3-45d8-be4d-7cf57ac90e3b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.228264] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cdba0d-7d30-4e90-8860-c6ee2945a4f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.235650] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac2ffc0-5027-42e5-aa34-c6f0eede57a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.240860] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "cee0356c-25d7-48ca-be09-16b0e1b56a41" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.241090] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "cee0356c-25d7-48ca-be09-16b0e1b56a41" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.241285] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "cee0356c-25d7-48ca-be09-16b0e1b56a41-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.241458] 
env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "cee0356c-25d7-48ca-be09-16b0e1b56a41-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.241615] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "cee0356c-25d7-48ca-be09-16b0e1b56a41-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.243571] env[62619]: INFO nova.compute.manager [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Terminating instance [ 1852.253196] env[62619]: DEBUG nova.compute.provider_tree [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1852.592020] env[62619]: DEBUG oslo_vmware.api [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778473, 'name': PowerOffVM_Task, 'duration_secs': 0.287873} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.592020] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1852.596451] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Reconfiguring VM instance instance-0000005f to detach disk 2002 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1852.596708] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-408a6514-0fc5-4990-9134-a5be6b7539f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.615907] env[62619]: DEBUG oslo_vmware.api [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1852.615907] env[62619]: value = "task-1778474" [ 1852.615907] env[62619]: _type = "Task" [ 1852.615907] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.623853] env[62619]: DEBUG oslo_vmware.api [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778474, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.631346] env[62619]: DEBUG nova.compute.manager [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1852.757439] env[62619]: DEBUG nova.scheduler.client.report [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1852.761165] env[62619]: DEBUG nova.compute.manager [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1852.761369] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1852.762400] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1dd955c-36dc-4186-a2b2-d8721e254e61 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.771466] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1852.771732] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43e79500-dc82-4d99-b46c-3d365d67f9eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.779564] env[62619]: DEBUG oslo_vmware.api [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1852.779564] env[62619]: value = "task-1778475" [ 1852.779564] env[62619]: _type = "Task" [ 1852.779564] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.790279] env[62619]: DEBUG oslo_vmware.api [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778475, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.126359] env[62619]: DEBUG oslo_vmware.api [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778474, 'name': ReconfigVM_Task, 'duration_secs': 0.271047} completed successfully. 
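The ReconfigVM_Task that just completed is the standard vSphere way to detach a disk: build a VirtualMachineConfigSpec whose deviceChange removes the target VirtualDisk, then reconfigure the VM. A hedged sketch of that call shape (session, vm_ref and device are assumed to exist already; this mirrors the vSphere API, not nova.virt.vmwareapi.volumeops verbatim):

    factory = session.vim.client.factory
    config_spec = factory.create('ns0:VirtualMachineConfigSpec')
    device_change = factory.create('ns0:VirtualDeviceConfigSpec')
    device_change.operation = 'remove'   # detach; no fileOperation, so the backing file is kept
    device_change.device = device        # the VirtualDisk picked out of the VM's hardware list
    config_spec.deviceChange = [device_change]

    task_ref = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=config_spec)
    session.wait_for_task(task_ref)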
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.126651] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Reconfigured VM instance instance-0000005f to detach disk 2002 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1853.126843] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1853.127110] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51aee399-d2a4-4004-b7a7-f0c2eb39d41f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.133426] env[62619]: DEBUG oslo_vmware.api [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1853.133426] env[62619]: value = "task-1778476" [ 1853.133426] env[62619]: _type = "Task" [ 1853.133426] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.144971] env[62619]: DEBUG oslo_vmware.api [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778476, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.161500] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.263730] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.267s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.264289] env[62619]: DEBUG nova.compute.manager [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Start building networks asynchronously for instance. 
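The "compute_resources" acquired/released lines above come from oslo.concurrency's synchronized wrapper: the resource tracker serializes claims and usage updates under a single named lock, which is why one request can show a multi-second "waited" value while another shows the matching "held" time. A small illustrative use of the same primitive (not Nova's actual resource tracker):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        # Reserve CPU/RAM/disk for the instance while the lock is held; the
        # "acquired ... waited" / "released ... held" lines are emitted by the
        # decorator's inner wrapper around calls like this one.
        return {'claimed_for': instance}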
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1853.267204] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.562s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.267440] env[62619]: DEBUG nova.objects.instance [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Lazy-loading 'resources' on Instance uuid e1cd6059-ddb0-4f10-a569-e0bc71a63f4b {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1853.290193] env[62619]: DEBUG oslo_vmware.api [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778475, 'name': PowerOffVM_Task, 'duration_secs': 0.284957} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.290450] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1853.290631] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1853.290931] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b97b982d-9db5-4aab-9a36-3a13b8f4f0c2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.369282] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1853.369597] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1853.369743] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Deleting the datastore file [datastore1] cee0356c-25d7-48ca-be09-16b0e1b56a41 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1853.370064] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-31ee59dc-8691-4bfd-a15f-414f3c425bb9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.377984] env[62619]: DEBUG oslo_vmware.api [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for the task: (returnval){ [ 1853.377984] env[62619]: value = "task-1778478" [ 1853.377984] env[62619]: _type = "Task" [ 1853.377984] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.386260] env[62619]: DEBUG oslo_vmware.api [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778478, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.645855] env[62619]: DEBUG oslo_vmware.api [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778476, 'name': PowerOnVM_Task, 'duration_secs': 0.450167} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.646160] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1853.646420] env[62619]: DEBUG nova.compute.manager [None req-d3d33f86-1fad-41ad-9997-2792ddce0c9d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1853.647239] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15a967d-18fe-4c40-8810-e781d14d1aba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.771064] env[62619]: DEBUG nova.compute.utils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1853.774766] env[62619]: DEBUG nova.compute.manager [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Allocating IP information in the background. 
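Once the VM above was unregistered, its files were removed with FileManager.DeleteDatastoreFile_Task against the per-instance directory on datastore1. A hedged sketch of that invocation through oslo.vmware (session and datacenter_ref are assumed to exist already):

    # The path is taken from the "Deleting the datastore file" entry above.
    file_manager = session.vim.service_content.fileManager
    task_ref = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] cee0356c-25d7-48ca-be09-16b0e1b56a41',
        datacenter=datacenter_ref)
    session.wait_for_task(task_ref)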
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1853.774940] env[62619]: DEBUG nova.network.neutron [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1853.814323] env[62619]: DEBUG nova.policy [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe6ee1c8d8ef4b718274da7be4f5fd01', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c82fb42e93ff479b971f49eb92f50832', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1853.889302] env[62619]: DEBUG oslo_vmware.api [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Task: {'id': task-1778478, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205378} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.889967] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1853.889967] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1853.889967] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1853.890225] env[62619]: INFO nova.compute.manager [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1853.890332] env[62619]: DEBUG oslo.service.loopingcall [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
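The nova.policy DEBUG line above records a failed check of network:attach_external_network for a token that only carries the member and reader roles; that rule is admin-only by default, so the failed check just means this tenant cannot attach to external networks, and the build continues on its tenant network. A rough, self-contained illustration of the same kind of oslo.policy check (the 'role:admin' check string here is an assumption for the example, not Nova's exact default rule):

    from oslo_config import cfg
    from oslo_policy import policy

    cfg.CONF([], project='nova')              # no config files needed for the sketch
    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['member', 'reader'],
             'project_id': 'c82fb42e93ff479b971f49eb92f50832'}
    print(enforcer.enforce('network:attach_external_network', {}, creds))   # False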
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1853.894019] env[62619]: DEBUG nova.compute.manager [-] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1853.894019] env[62619]: DEBUG nova.network.neutron [-] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1853.940684] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57747549-a83c-42a2-9fc3-9eaa33505695 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.948934] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4335ed-60ab-4444-8202-d6bd7c1b18dd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.979350] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1865ddc8-dcaa-4d28-be9e-87612e6858f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.987174] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a708eb6b-7b89-4b64-ba4d-93409500e510 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.000544] env[62619]: DEBUG nova.compute.provider_tree [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1854.278710] env[62619]: DEBUG nova.compute.manager [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1854.348841] env[62619]: DEBUG nova.compute.manager [req-9fdb79fe-c3b8-4b2f-b11e-adad258c4e95 req-9dad35d2-3e26-4fff-b077-684dc8197673 service nova] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Received event network-vif-deleted-105882b4-fd3c-4267-bcf1-662dacfc582f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1854.349668] env[62619]: INFO nova.compute.manager [req-9fdb79fe-c3b8-4b2f-b11e-adad258c4e95 req-9dad35d2-3e26-4fff-b077-684dc8197673 service nova] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Neutron deleted interface 105882b4-fd3c-4267-bcf1-662dacfc582f; detaching it from the instance and deleting it from the info cache [ 1854.349902] env[62619]: DEBUG nova.network.neutron [req-9fdb79fe-c3b8-4b2f-b11e-adad258c4e95 req-9dad35d2-3e26-4fff-b077-684dc8197673 service nova] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.353991] env[62619]: DEBUG nova.network.neutron [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Successfully created port: b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1854.479708] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "21d9fc7a-228e-4b33-8534-55285d4e6e96" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.479935] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "21d9fc7a-228e-4b33-8534-55285d4e6e96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.503699] env[62619]: DEBUG nova.scheduler.client.report [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1854.814599] env[62619]: DEBUG nova.network.neutron [-] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.852955] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
SearchIndex.FindAllByUuid with opID=oslo.vmware-42da91f5-7087-432d-802f-3a55f5dc9443 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.864371] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41205d5d-85e3-4783-bcc4-be78afbcfc4d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.894783] env[62619]: DEBUG nova.compute.manager [req-9fdb79fe-c3b8-4b2f-b11e-adad258c4e95 req-9dad35d2-3e26-4fff-b077-684dc8197673 service nova] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Detach interface failed, port_id=105882b4-fd3c-4267-bcf1-662dacfc582f, reason: Instance cee0356c-25d7-48ca-be09-16b0e1b56a41 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1854.982516] env[62619]: DEBUG nova.compute.manager [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1855.008479] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.741s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.014597] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.333s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.014597] env[62619]: INFO nova.compute.claims [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1855.051071] env[62619]: INFO nova.scheduler.client.report [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Deleted allocations for instance e1cd6059-ddb0-4f10-a569-e0bc71a63f4b [ 1855.064363] env[62619]: DEBUG nova.compute.manager [req-2c226063-a24c-473a-bbc6-7aa2ea92b2fb req-19c435bc-b8aa-4036-ada8-c0a413357a58 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Received event network-changed-165f6c4a-b24e-4c32-845f-891bf7478563 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1855.064565] env[62619]: DEBUG nova.compute.manager [req-2c226063-a24c-473a-bbc6-7aa2ea92b2fb req-19c435bc-b8aa-4036-ada8-c0a413357a58 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Refreshing instance network info cache due to event network-changed-165f6c4a-b24e-4c32-845f-891bf7478563. 
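The req-.../service nova entries above (network-vif-deleted, network-changed) are Neutron notifying Nova through the os-server-external-events API; Nova then refreshes or prunes its instance network info cache, and logs "could not be found" when the event outlives the instance, as happened for port 105882b4. A rough sketch of such an event delivery (endpoint URL and token are placeholders; the UUIDs are the ones from the log):

    import requests

    payload = {'events': [{
        'name': 'network-changed',
        'server_uuid': '52b29fef-eab6-4541-a570-af9c0c021a75',
        'tag': '165f6c4a-b24e-4c32-845f-891bf7478563',   # the affected port
        'status': 'completed',
    }]}
    requests.post(
        'http://controller.example.test/compute/v2.1/os-server-external-events',
        json=payload, headers={'X-Auth-Token': 'SERVICE_TOKEN'})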
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1855.064777] env[62619]: DEBUG oslo_concurrency.lockutils [req-2c226063-a24c-473a-bbc6-7aa2ea92b2fb req-19c435bc-b8aa-4036-ada8-c0a413357a58 service nova] Acquiring lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1855.064916] env[62619]: DEBUG oslo_concurrency.lockutils [req-2c226063-a24c-473a-bbc6-7aa2ea92b2fb req-19c435bc-b8aa-4036-ada8-c0a413357a58 service nova] Acquired lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1855.065151] env[62619]: DEBUG nova.network.neutron [req-2c226063-a24c-473a-bbc6-7aa2ea92b2fb req-19c435bc-b8aa-4036-ada8-c0a413357a58 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Refreshing network info cache for port 165f6c4a-b24e-4c32-845f-891bf7478563 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1855.289054] env[62619]: DEBUG nova.compute.manager [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1855.314176] env[62619]: DEBUG nova.virt.hardware [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1855.314432] env[62619]: DEBUG nova.virt.hardware [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1855.314586] env[62619]: DEBUG nova.virt.hardware [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1855.314767] env[62619]: DEBUG nova.virt.hardware [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1855.314913] env[62619]: DEBUG nova.virt.hardware [None 
req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1855.315072] env[62619]: DEBUG nova.virt.hardware [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1855.315287] env[62619]: DEBUG nova.virt.hardware [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1855.315446] env[62619]: DEBUG nova.virt.hardware [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1855.315609] env[62619]: DEBUG nova.virt.hardware [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1855.315768] env[62619]: DEBUG nova.virt.hardware [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1855.315954] env[62619]: DEBUG nova.virt.hardware [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1855.316816] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e15750e-3a16-4f5a-a911-cf3e76f53c75 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.319570] env[62619]: INFO nova.compute.manager [-] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Took 1.43 seconds to deallocate network for instance. 
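The nova.virt.hardware block above walks the CPU topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits, the only sockets*cores*threads factorization of 1 is 1:1:1, which is exactly the single VirtCPUTopology the log prints. An illustrative re-derivation of that enumeration (the idea only, not nova.virt.hardware's actual algorithm):

    def possible_topologies(vcpus):
        return [(s, c, t)
                for s in range(1, vcpus + 1)
                for c in range(1, vcpus + 1)
                for t in range(1, vcpus + 1)
                if s * c * t == vcpus]

    print(possible_topologies(1))   # [(1, 1, 1)]
    print(possible_topologies(4))   # includes (1, 4, 1), (2, 2, 1), (4, 1, 1), ...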
[ 1855.327653] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9465467e-8a78-43ab-8201-d5f9c6b18cf9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.501010] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.563224] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9c18c12a-7cc6-49f2-94a9-e75c37075601 tempest-ServerMetadataNegativeTestJSON-914272101 tempest-ServerMetadataNegativeTestJSON-914272101-project-member] Lock "e1cd6059-ddb0-4f10-a569-e0bc71a63f4b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.389s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.829055] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.908338] env[62619]: DEBUG nova.network.neutron [req-2c226063-a24c-473a-bbc6-7aa2ea92b2fb req-19c435bc-b8aa-4036-ada8-c0a413357a58 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Updated VIF entry in instance network info cache for port 165f6c4a-b24e-4c32-845f-891bf7478563. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1855.908753] env[62619]: DEBUG nova.network.neutron [req-2c226063-a24c-473a-bbc6-7aa2ea92b2fb req-19c435bc-b8aa-4036-ada8-c0a413357a58 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Updating instance_info_cache with network_info: [{"id": "165f6c4a-b24e-4c32-845f-891bf7478563", "address": "fa:16:3e:ec:67:49", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165f6c4a-b2", "ovs_interfaceid": "165f6c4a-b24e-4c32-845f-891bf7478563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1856.043771] env[62619]: DEBUG nova.network.neutron [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Successfully updated port: b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1856.194610] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f16564cc-5410-4ae5-8f7e-7f0a8ef651a1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.203175] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67fdb9f6-3007-4f7a-87d3-76b1111b1737 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.236700] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b86dd47-a572-4ac7-9259-1a7c822cc3ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.244988] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d29c22-3a42-4582-9e18-dd98c41541e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.259623] env[62619]: DEBUG nova.compute.provider_tree [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1856.374034] env[62619]: DEBUG nova.compute.manager [req-2e4eb79f-f4af-49ec-818b-10ae10f6ea23 req-010d4b12-1636-408d-be6f-dc20e8565291 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Received event network-vif-plugged-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1856.374267] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e4eb79f-f4af-49ec-818b-10ae10f6ea23 req-010d4b12-1636-408d-be6f-dc20e8565291 service nova] Acquiring lock "a0953370-77f2-4e3b-a92e-cb12b3a82361-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.374412] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e4eb79f-f4af-49ec-818b-10ae10f6ea23 req-010d4b12-1636-408d-be6f-dc20e8565291 service nova] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.374571] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e4eb79f-f4af-49ec-818b-10ae10f6ea23 req-010d4b12-1636-408d-be6f-dc20e8565291 service nova] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.374786] env[62619]: DEBUG nova.compute.manager [req-2e4eb79f-f4af-49ec-818b-10ae10f6ea23 req-010d4b12-1636-408d-be6f-dc20e8565291 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] No waiting events found dispatching network-vif-plugged-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1856.375011] env[62619]: WARNING nova.compute.manager [req-2e4eb79f-f4af-49ec-818b-10ae10f6ea23 req-010d4b12-1636-408d-be6f-dc20e8565291 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Received unexpected event network-vif-plugged-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c for instance with vm_state building and task_state spawning. [ 1856.375294] env[62619]: DEBUG nova.compute.manager [req-2e4eb79f-f4af-49ec-818b-10ae10f6ea23 req-010d4b12-1636-408d-be6f-dc20e8565291 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Received event network-changed-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1856.375358] env[62619]: DEBUG nova.compute.manager [req-2e4eb79f-f4af-49ec-818b-10ae10f6ea23 req-010d4b12-1636-408d-be6f-dc20e8565291 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Refreshing instance network info cache due to event network-changed-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1856.375527] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e4eb79f-f4af-49ec-818b-10ae10f6ea23 req-010d4b12-1636-408d-be6f-dc20e8565291 service nova] Acquiring lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.375658] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e4eb79f-f4af-49ec-818b-10ae10f6ea23 req-010d4b12-1636-408d-be6f-dc20e8565291 service nova] Acquired lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.375809] env[62619]: DEBUG nova.network.neutron [req-2e4eb79f-f4af-49ec-818b-10ae10f6ea23 req-010d4b12-1636-408d-be6f-dc20e8565291 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Refreshing network info cache for port b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1856.411543] env[62619]: DEBUG oslo_concurrency.lockutils [req-2c226063-a24c-473a-bbc6-7aa2ea92b2fb req-19c435bc-b8aa-4036-ada8-c0a413357a58 service nova] Releasing lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.550400] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.766787] env[62619]: DEBUG nova.scheduler.client.report [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1856.913620] env[62619]: DEBUG nova.network.neutron [req-2e4eb79f-f4af-49ec-818b-10ae10f6ea23 req-010d4b12-1636-408d-be6f-dc20e8565291 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1857.003268] env[62619]: DEBUG nova.network.neutron [req-2e4eb79f-f4af-49ec-818b-10ae10f6ea23 req-010d4b12-1636-408d-be6f-dc20e8565291 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.092780] env[62619]: DEBUG nova.compute.manager [req-9c1ca47a-be88-4a57-a837-e4f9b4d34e5f req-531d2cdd-e117-4c62-9caf-5f042fe5bd27 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Received event network-changed-165f6c4a-b24e-4c32-845f-891bf7478563 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1857.092780] env[62619]: DEBUG nova.compute.manager [req-9c1ca47a-be88-4a57-a837-e4f9b4d34e5f req-531d2cdd-e117-4c62-9caf-5f042fe5bd27 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Refreshing instance network info cache due to event network-changed-165f6c4a-b24e-4c32-845f-891bf7478563. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1857.093515] env[62619]: DEBUG oslo_concurrency.lockutils [req-9c1ca47a-be88-4a57-a837-e4f9b4d34e5f req-531d2cdd-e117-4c62-9caf-5f042fe5bd27 service nova] Acquiring lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1857.093515] env[62619]: DEBUG oslo_concurrency.lockutils [req-9c1ca47a-be88-4a57-a837-e4f9b4d34e5f req-531d2cdd-e117-4c62-9caf-5f042fe5bd27 service nova] Acquired lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1857.093589] env[62619]: DEBUG nova.network.neutron [req-9c1ca47a-be88-4a57-a837-e4f9b4d34e5f req-531d2cdd-e117-4c62-9caf-5f042fe5bd27 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Refreshing network info cache for port 165f6c4a-b24e-4c32-845f-891bf7478563 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1857.271616] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.260s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.272631] env[62619]: DEBUG nova.compute.manager [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1857.275281] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.114s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.278819] env[62619]: INFO nova.compute.claims [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1857.504179] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e4eb79f-f4af-49ec-818b-10ae10f6ea23 req-010d4b12-1636-408d-be6f-dc20e8565291 service nova] Releasing lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1857.504594] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1857.504756] env[62619]: DEBUG nova.network.neutron [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1857.781976] env[62619]: DEBUG nova.compute.utils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1857.785379] env[62619]: DEBUG nova.compute.manager [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Allocating IP information in the background. 
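The compute.utils "Using /dev/sd instead of None" entry above is the device-name fallback during block device mapping: when the request does not name a device, Nova picks the next free name on the default /dev/sd prefix. A toy illustration of that idea (the real get_next_device_name also accounts for root, ephemeral and swap devices; this is just the gist):

    import string

    def next_device_name(used, prefix='/dev/sd'):
        for letter in string.ascii_lowercase:
            candidate = prefix + letter
            if candidate not in used:
                return candidate
        raise ValueError('no free device names on ' + prefix)

    print(next_device_name({'/dev/sda'}))   # /dev/sdb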
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1857.785550] env[62619]: DEBUG nova.network.neutron [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1857.845680] env[62619]: DEBUG nova.policy [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd106b23f779045f788b2536afd8c623d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2377a52a195d4f0b9181207ab5741734', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1857.881020] env[62619]: DEBUG nova.network.neutron [req-9c1ca47a-be88-4a57-a837-e4f9b4d34e5f req-531d2cdd-e117-4c62-9caf-5f042fe5bd27 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Updated VIF entry in instance network info cache for port 165f6c4a-b24e-4c32-845f-891bf7478563. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1857.881020] env[62619]: DEBUG nova.network.neutron [req-9c1ca47a-be88-4a57-a837-e4f9b4d34e5f req-531d2cdd-e117-4c62-9caf-5f042fe5bd27 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Updating instance_info_cache with network_info: [{"id": "165f6c4a-b24e-4c32-845f-891bf7478563", "address": "fa:16:3e:ec:67:49", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165f6c4a-b2", "ovs_interfaceid": "165f6c4a-b24e-4c32-845f-891bf7478563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1858.055230] env[62619]: DEBUG nova.network.neutron [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Instance cache missing network info. 
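The instance_info_cache entry above is a JSON list of VIF dicts; everything Nova needs about the port (MAC, fixed and floating IPs, MTU, segmentation ID, tap device name) is nested inside it. Pulling those fields out of one VIF, with the values copied from the log:

    vif = {
        "id": "165f6c4a-b24e-4c32-845f-891bf7478563",
        "address": "fa:16:3e:ec:67:49",
        "devname": "tap165f6c4a-b2",
        "network": {
            "meta": {"mtu": 8950},
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.10",
                         "floating_ips": [{"address": "10.180.180.202"}]}],
            }],
        },
        "details": {"segmentation_id": 688},
    }

    ip = vif["network"]["subnets"][0]["ips"][0]
    print(vif["devname"], vif["address"], ip["address"],
          ip["floating_ips"][0]["address"], vif["details"]["segmentation_id"])
    # tap165f6c4a-b2 fa:16:3e:ec:67:49 192.168.128.10 10.180.180.202 688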
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1858.268474] env[62619]: DEBUG nova.network.neutron [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Updating instance_info_cache with network_info: [{"id": "b78d1b6e-104b-4041-bcc5-5802f9f6fe3c", "address": "fa:16:3e:3b:fe:49", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb78d1b6e-10", "ovs_interfaceid": "b78d1b6e-104b-4041-bcc5-5802f9f6fe3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1858.270240] env[62619]: DEBUG nova.network.neutron [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Successfully created port: 7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1858.286371] env[62619]: DEBUG nova.compute.manager [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1858.385845] env[62619]: DEBUG oslo_concurrency.lockutils [req-9c1ca47a-be88-4a57-a837-e4f9b4d34e5f req-531d2cdd-e117-4c62-9caf-5f042fe5bd27 service nova] Releasing lock "refresh_cache-52b29fef-eab6-4541-a570-af9c0c021a75" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1858.486863] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e352f464-1e7b-4143-9a22-47d639944eb8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.495396] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f80ed4-139a-435b-b054-0fc1097d3a07 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.533867] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2db283-5217-4ee5-a2b3-650a088f68a1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.542647] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e15e3f5-4886-43b1-8af5-b0ab3420c022 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.559062] env[62619]: DEBUG nova.compute.provider_tree [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1858.773555] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1858.773893] env[62619]: DEBUG nova.compute.manager [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Instance network_info: |[{"id": "b78d1b6e-104b-4041-bcc5-5802f9f6fe3c", "address": "fa:16:3e:3b:fe:49", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb78d1b6e-10", "ovs_interfaceid": 
"b78d1b6e-104b-4041-bcc5-5802f9f6fe3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1858.774337] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:fe:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '816c6e38-e200-4544-8c5b-9fc3e16c5761', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b78d1b6e-104b-4041-bcc5-5802f9f6fe3c', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1858.782332] env[62619]: DEBUG oslo.service.loopingcall [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1858.782561] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1858.782784] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4337e2ed-9d42-4a4f-a863-c019ae462e92 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.807465] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1858.807465] env[62619]: value = "task-1778479" [ 1858.807465] env[62619]: _type = "Task" [ 1858.807465] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.826683] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778479, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.062724] env[62619]: DEBUG nova.scheduler.client.report [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1859.302904] env[62619]: DEBUG nova.compute.manager [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1859.322938] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778479, 'name': CreateVM_Task, 'duration_secs': 0.47944} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.322938] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1859.323981] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.324230] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.324805] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1859.324805] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c35a17c7-f2c7-424d-9a97-be049ed13a2f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.332761] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1859.332761] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529f5b65-192d-1b68-cc20-3a8d64121442" [ 1859.332761] env[62619]: _type = "Task" [ 1859.332761] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.340673] env[62619]: DEBUG nova.virt.hardware [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1859.340976] env[62619]: DEBUG nova.virt.hardware [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1859.341199] env[62619]: DEBUG nova.virt.hardware [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1859.341437] env[62619]: DEBUG nova.virt.hardware [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1859.341638] env[62619]: DEBUG nova.virt.hardware [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1859.341844] env[62619]: DEBUG nova.virt.hardware [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1859.342124] env[62619]: DEBUG nova.virt.hardware [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1859.342328] env[62619]: DEBUG nova.virt.hardware [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1859.342536] env[62619]: DEBUG nova.virt.hardware [None 
req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1859.342735] env[62619]: DEBUG nova.virt.hardware [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1859.342947] env[62619]: DEBUG nova.virt.hardware [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1859.344114] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4a93a8-973a-4665-b25e-698ffbe9624c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.349410] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529f5b65-192d-1b68-cc20-3a8d64121442, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.355216] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ab0147-0c3d-42e2-92b4-fb1a6664ebc3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.568181] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.293s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.568722] env[62619]: DEBUG nova.compute.manager [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1859.572063] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.071s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1859.573638] env[62619]: INFO nova.compute.claims [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1859.794352] env[62619]: DEBUG nova.compute.manager [req-b4885f69-886e-4d28-ab5b-cc13e50809a1 req-3b7e90e1-f649-402b-81cf-d497ff454ae8 service nova] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Received event network-vif-plugged-7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1859.794875] env[62619]: DEBUG oslo_concurrency.lockutils [req-b4885f69-886e-4d28-ab5b-cc13e50809a1 req-3b7e90e1-f649-402b-81cf-d497ff454ae8 service nova] Acquiring lock "f4e85890-ca7d-45a7-92ff-ab881c21c7ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.795253] env[62619]: DEBUG oslo_concurrency.lockutils [req-b4885f69-886e-4d28-ab5b-cc13e50809a1 req-3b7e90e1-f649-402b-81cf-d497ff454ae8 service nova] Lock "f4e85890-ca7d-45a7-92ff-ab881c21c7ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1859.795895] env[62619]: DEBUG oslo_concurrency.lockutils [req-b4885f69-886e-4d28-ab5b-cc13e50809a1 req-3b7e90e1-f649-402b-81cf-d497ff454ae8 service nova] Lock "f4e85890-ca7d-45a7-92ff-ab881c21c7ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.795895] env[62619]: DEBUG nova.compute.manager [req-b4885f69-886e-4d28-ab5b-cc13e50809a1 req-3b7e90e1-f649-402b-81cf-d497ff454ae8 service nova] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] No waiting events found dispatching network-vif-plugged-7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1859.795895] env[62619]: WARNING nova.compute.manager [req-b4885f69-886e-4d28-ab5b-cc13e50809a1 req-3b7e90e1-f649-402b-81cf-d497ff454ae8 service nova] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Received unexpected event network-vif-plugged-7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f for instance with vm_state building and task_state spawning. [ 1859.848086] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529f5b65-192d-1b68-cc20-3a8d64121442, 'name': SearchDatastore_Task, 'duration_secs': 0.011935} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.848394] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1859.848647] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1859.849312] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.849312] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.849312] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1859.850036] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e18e227f-c8b6-4801-90e4-fd1f1db53951 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.863093] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1859.863343] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1859.864121] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-139abfd0-c1f7-4e96-9021-330b8b390aee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.875192] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1859.875192] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525b4565-2f23-c853-d64e-f30d316cd681" [ 1859.875192] env[62619]: _type = "Task" [ 1859.875192] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.884540] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525b4565-2f23-c853-d64e-f30d316cd681, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.912347] env[62619]: DEBUG nova.network.neutron [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Successfully updated port: 7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1860.082088] env[62619]: DEBUG nova.compute.utils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1860.083566] env[62619]: DEBUG nova.compute.manager [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1860.085339] env[62619]: DEBUG nova.network.neutron [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1860.132245] env[62619]: DEBUG nova.policy [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b30d7e2e4a9447e8b2caa6f7fc30aa65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a8f5f9386ba4dfa869c288a30aaeada', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1860.386435] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525b4565-2f23-c853-d64e-f30d316cd681, 'name': SearchDatastore_Task, 'duration_secs': 0.010969} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.387237] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e7c7198-df09-44fa-8446-90981721cba1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.392973] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1860.392973] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52886af1-0526-e213-7580-1cb71b9a5daa" [ 1860.392973] env[62619]: _type = "Task" [ 1860.392973] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.402533] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52886af1-0526-e213-7580-1cb71b9a5daa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.415186] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "refresh_cache-f4e85890-ca7d-45a7-92ff-ab881c21c7ed" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.415304] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "refresh_cache-f4e85890-ca7d-45a7-92ff-ab881c21c7ed" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.415409] env[62619]: DEBUG nova.network.neutron [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1860.538152] env[62619]: DEBUG nova.network.neutron [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Successfully created port: b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1860.588335] env[62619]: DEBUG nova.compute.manager [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1860.900858] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c7aecb-aa26-41e8-8075-124c838e026b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.911624] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52886af1-0526-e213-7580-1cb71b9a5daa, 'name': SearchDatastore_Task, 'duration_secs': 0.010075} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.913524] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.913782] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a0953370-77f2-4e3b-a92e-cb12b3a82361/a0953370-77f2-4e3b-a92e-cb12b3a82361.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1860.914256] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-216e58b6-686a-441d-82c6-66f5d5d42d45 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.916888] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045c0c32-7f20-46bb-8014-79ecef681706 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.958212] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2605783b-b14c-4763-89ca-b174692880dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.960360] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1860.960360] env[62619]: value = "task-1778480" [ 1860.960360] env[62619]: _type = "Task" [ 1860.960360] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.967620] env[62619]: DEBUG nova.network.neutron [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1860.971887] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a52b7e3-737d-4184-872e-6e24d943e0d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.980426] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778480, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.990916] env[62619]: DEBUG nova.compute.provider_tree [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1861.130574] env[62619]: DEBUG nova.network.neutron [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Updating instance_info_cache with network_info: [{"id": "7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f", "address": "fa:16:3e:c1:fb:59", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ac08a6c-7d", "ovs_interfaceid": "7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1861.407117] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.410828] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.472779] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778480, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517696} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.472779] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a0953370-77f2-4e3b-a92e-cb12b3a82361/a0953370-77f2-4e3b-a92e-cb12b3a82361.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1861.472779] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1861.472779] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d0546c6-dee2-4bd2-af4c-862f78428752 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.480645] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1861.480645] env[62619]: value = "task-1778481" [ 1861.480645] env[62619]: _type = "Task" [ 1861.480645] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.489680] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778481, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.493824] env[62619]: DEBUG nova.scheduler.client.report [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1861.598426] env[62619]: DEBUG nova.compute.manager [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1861.626237] env[62619]: DEBUG nova.virt.hardware [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1861.626507] env[62619]: DEBUG nova.virt.hardware [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1861.626664] env[62619]: DEBUG nova.virt.hardware [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1861.626843] env[62619]: DEBUG nova.virt.hardware [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1861.626985] env[62619]: DEBUG nova.virt.hardware [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1861.627158] env[62619]: DEBUG nova.virt.hardware [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1861.627406] env[62619]: DEBUG nova.virt.hardware [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1861.627569] env[62619]: DEBUG nova.virt.hardware [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
[ 1861.627731] env[62619]: DEBUG nova.virt.hardware [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1861.627888] env[62619]: DEBUG nova.virt.hardware [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1861.628095] env[62619]: DEBUG nova.virt.hardware [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1861.628955] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469b1865-6b9e-42b4-b410-30db19b64870 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.633264] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "refresh_cache-f4e85890-ca7d-45a7-92ff-ab881c21c7ed" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1861.633564] env[62619]: DEBUG nova.compute.manager [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Instance network_info: |[{"id": "7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f", "address": "fa:16:3e:c1:fb:59", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ac08a6c-7d", "ovs_interfaceid": "7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1861.633922] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:c1:fb:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1861.641291] env[62619]: DEBUG oslo.service.loopingcall [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1861.643499] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1861.644080] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f9e049d-ecae-4ebf-948e-7c9bfcd86f02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.658991] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7bef9af-f3a1-48ac-9c4f-29c69a852ba7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.680950] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1861.680950] env[62619]: value = "task-1778482" [ 1861.680950] env[62619]: _type = "Task" [ 1861.680950] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.689275] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778482, 'name': CreateVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.821799] env[62619]: DEBUG nova.compute.manager [req-e45316c5-af9c-402a-9003-f548eac624b0 req-78d43046-788d-4045-9599-6af3dd72fb3c service nova] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Received event network-changed-7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1861.822016] env[62619]: DEBUG nova.compute.manager [req-e45316c5-af9c-402a-9003-f548eac624b0 req-78d43046-788d-4045-9599-6af3dd72fb3c service nova] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Refreshing instance network info cache due to event network-changed-7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1861.822252] env[62619]: DEBUG oslo_concurrency.lockutils [req-e45316c5-af9c-402a-9003-f548eac624b0 req-78d43046-788d-4045-9599-6af3dd72fb3c service nova] Acquiring lock "refresh_cache-f4e85890-ca7d-45a7-92ff-ab881c21c7ed" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1861.822399] env[62619]: DEBUG oslo_concurrency.lockutils [req-e45316c5-af9c-402a-9003-f548eac624b0 req-78d43046-788d-4045-9599-6af3dd72fb3c service nova] Acquired lock "refresh_cache-f4e85890-ca7d-45a7-92ff-ab881c21c7ed" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1861.822560] env[62619]: DEBUG nova.network.neutron [req-e45316c5-af9c-402a-9003-f548eac624b0 req-78d43046-788d-4045-9599-6af3dd72fb3c service nova] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Refreshing network info cache for port 7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1861.910020] env[62619]: DEBUG nova.compute.manager [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1861.990457] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778481, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06772} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.990809] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1861.991667] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8dad45c-9e5f-4fef-a7dc-487c10528d89 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.006508] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.434s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.006897] env[62619]: DEBUG nova.compute.manager [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1862.018348] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] a0953370-77f2-4e3b-a92e-cb12b3a82361/a0953370-77f2-4e3b-a92e-cb12b3a82361.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1862.018972] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.190s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.019239] env[62619]: DEBUG nova.objects.instance [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lazy-loading 'resources' on Instance uuid cee0356c-25d7-48ca-be09-16b0e1b56a41 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1862.020464] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76b3533e-3170-43f0-abef-0bf2a7f2eb8c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.042639] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1862.042639] env[62619]: value = "task-1778483" [ 1862.042639] env[62619]: _type = "Task" [ 1862.042639] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.052722] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778483, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.191407] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778482, 'name': CreateVM_Task, 'duration_secs': 0.385249} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.191585] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1862.192313] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1862.192481] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.192818] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1862.193096] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5da0943a-539b-4327-aa3f-4a659b516e2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.199141] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1862.199141] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52644b37-6635-4c60-fe3b-1c7592fdc976" [ 1862.199141] env[62619]: _type = "Task" [ 1862.199141] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.208112] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52644b37-6635-4c60-fe3b-1c7592fdc976, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.217788] env[62619]: DEBUG nova.network.neutron [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Successfully updated port: b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1862.434852] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1862.521284] env[62619]: DEBUG nova.compute.utils [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1862.522666] env[62619]: DEBUG nova.compute.manager [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1862.522838] env[62619]: DEBUG nova.network.neutron [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1862.552900] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778483, 'name': ReconfigVM_Task, 'duration_secs': 0.293793} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.555345] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Reconfigured VM instance instance-0000006a to attach disk [datastore1] a0953370-77f2-4e3b-a92e-cb12b3a82361/a0953370-77f2-4e3b-a92e-cb12b3a82361.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1862.558013] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e28bd0c7-20de-4d59-9b4a-3bb3d8949ba0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.565470] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1862.565470] env[62619]: value = "task-1778484" [ 1862.565470] env[62619]: _type = "Task" [ 1862.565470] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.576420] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778484, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.577958] env[62619]: DEBUG nova.policy [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c91b100cc8f94b93af086dafebe29092', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c24c9d49d8d4104a0868f126eb3a26e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1862.620864] env[62619]: DEBUG nova.network.neutron [req-e45316c5-af9c-402a-9003-f548eac624b0 req-78d43046-788d-4045-9599-6af3dd72fb3c service nova] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Updated VIF entry in instance network info cache for port 7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1862.621235] env[62619]: DEBUG nova.network.neutron [req-e45316c5-af9c-402a-9003-f548eac624b0 req-78d43046-788d-4045-9599-6af3dd72fb3c service nova] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Updating instance_info_cache with network_info: [{"id": "7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f", "address": "fa:16:3e:c1:fb:59", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ac08a6c-7d", "ovs_interfaceid": "7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.711519] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52644b37-6635-4c60-fe3b-1c7592fdc976, 'name': SearchDatastore_Task, 'duration_secs': 0.0098} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.711821] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.712063] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1862.712320] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1862.712471] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.712649] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1862.712912] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-472f8d1b-54f0-4b45-b533-671d669ce5e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.720566] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1862.720727] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.720843] env[62619]: DEBUG nova.network.neutron [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1862.722905] 
env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1862.723091] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1862.723983] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27f74a2a-4f26-468c-aa00-b7e818e23bcc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.728309] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f9e78b-57eb-481c-ad33-a2e9d8240958 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.734025] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1862.734025] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528a0e04-6e24-bcc8-196a-e0691f3c7069" [ 1862.734025] env[62619]: _type = "Task" [ 1862.734025] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.741410] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91521c63-b9ea-4369-9962-56307d964aa8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.750434] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528a0e04-6e24-bcc8-196a-e0691f3c7069, 'name': SearchDatastore_Task, 'duration_secs': 0.009172} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.778711] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55251008-7826-4106-9ccd-58e456f124a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.781505] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134c7feb-1ac5-46dd-8b50-bc9071765f3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.787536] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1862.787536] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527279c1-bfb8-42e1-1bb9-5525b3c5e17d" [ 1862.787536] env[62619]: _type = "Task" [ 1862.787536] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.793555] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67cd3b86-4aa6-4fe5-a227-ba26b42b1966 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.807863] env[62619]: DEBUG nova.compute.provider_tree [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1862.811976] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527279c1-bfb8-42e1-1bb9-5525b3c5e17d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.851753] env[62619]: DEBUG nova.network.neutron [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Successfully created port: ce3520ea-f75e-4d6a-a27f-de90d6383823 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1863.025989] env[62619]: DEBUG nova.compute.manager [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1863.076702] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778484, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.125140] env[62619]: DEBUG oslo_concurrency.lockutils [req-e45316c5-af9c-402a-9003-f548eac624b0 req-78d43046-788d-4045-9599-6af3dd72fb3c service nova] Releasing lock "refresh_cache-f4e85890-ca7d-45a7-92ff-ab881c21c7ed" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.270804] env[62619]: DEBUG nova.network.neutron [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1863.303929] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527279c1-bfb8-42e1-1bb9-5525b3c5e17d, 'name': SearchDatastore_Task, 'duration_secs': 0.020879} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.304222] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.304477] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] f4e85890-ca7d-45a7-92ff-ab881c21c7ed/f4e85890-ca7d-45a7-92ff-ab881c21c7ed.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1863.304730] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-102f3e9b-d777-4e2e-99c4-4886e834abf3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.311995] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1863.311995] env[62619]: value = "task-1778485" [ 1863.311995] env[62619]: _type = "Task" [ 1863.311995] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.317195] env[62619]: DEBUG nova.scheduler.client.report [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1863.326094] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778485, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.497507] env[62619]: DEBUG nova.network.neutron [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Updating instance_info_cache with network_info: [{"id": "b52c0c61-cdaa-4ec8-b935-3229b930c548", "address": "fa:16:3e:58:c2:30", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb52c0c61-cd", "ovs_interfaceid": "b52c0c61-cdaa-4ec8-b935-3229b930c548", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1863.531968] env[62619]: INFO nova.virt.block_device [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Booting with volume d3e2944a-ae36-4f80-82db-68a3de2d143e at /dev/sda [ 1863.576922] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778484, 'name': Rename_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.578478] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-293457e4-c465-4951-a6b8-4d2d0125ad96 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.588872] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b0db08-eb1d-4ad0-b517-f4c0514d812d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.623254] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b4787170-ba71-4f73-8fab-cdd94f06b5e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.633731] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84efb750-37fd-48bf-8db2-a0d78541be3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.672224] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e9ddb5-c937-43d4-807c-4e4391c74793 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.681538] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979b02c6-d4d9-4921-9384-35163d8a8d68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.699503] env[62619]: DEBUG nova.virt.block_device [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating existing volume attachment record: 9be2ffbf-70b4-4f3f-b61d-43e3294340c4 {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1863.824032] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.804s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.826233] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778485, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467872} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.826826] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.392s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.828825] env[62619]: INFO nova.compute.claims [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1863.832233] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] f4e85890-ca7d-45a7-92ff-ab881c21c7ed/f4e85890-ca7d-45a7-92ff-ab881c21c7ed.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1863.832630] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1863.835252] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ead7bb4d-bbac-4ded-800a-99a7479ae2ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.843018] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1863.843018] env[62619]: value = "task-1778486" [ 1863.843018] env[62619]: _type = "Task" [ 1863.843018] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.849149] env[62619]: DEBUG nova.compute.manager [req-fe07ff70-576b-4a3c-941c-8aa0ed3e921c req-3c83b1ea-1c67-4a2d-8a95-cc4f8e24b15f service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Received event network-vif-plugged-b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1863.850153] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe07ff70-576b-4a3c-941c-8aa0ed3e921c req-3c83b1ea-1c67-4a2d-8a95-cc4f8e24b15f service nova] Acquiring lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.850153] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe07ff70-576b-4a3c-941c-8aa0ed3e921c req-3c83b1ea-1c67-4a2d-8a95-cc4f8e24b15f service nova] Lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.850153] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe07ff70-576b-4a3c-941c-8aa0ed3e921c req-3c83b1ea-1c67-4a2d-8a95-cc4f8e24b15f service nova] Lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.850805] env[62619]: DEBUG nova.compute.manager [req-fe07ff70-576b-4a3c-941c-8aa0ed3e921c req-3c83b1ea-1c67-4a2d-8a95-cc4f8e24b15f service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] No waiting events found dispatching network-vif-plugged-b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1863.851283] env[62619]: WARNING nova.compute.manager [req-fe07ff70-576b-4a3c-941c-8aa0ed3e921c req-3c83b1ea-1c67-4a2d-8a95-cc4f8e24b15f service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Received unexpected event network-vif-plugged-b52c0c61-cdaa-4ec8-b935-3229b930c548 for instance with vm_state building and task_state spawning. [ 1863.854090] env[62619]: DEBUG nova.compute.manager [req-fe07ff70-576b-4a3c-941c-8aa0ed3e921c req-3c83b1ea-1c67-4a2d-8a95-cc4f8e24b15f service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Received event network-changed-b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1863.854090] env[62619]: DEBUG nova.compute.manager [req-fe07ff70-576b-4a3c-941c-8aa0ed3e921c req-3c83b1ea-1c67-4a2d-8a95-cc4f8e24b15f service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Refreshing instance network info cache due to event network-changed-b52c0c61-cdaa-4ec8-b935-3229b930c548. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1863.854090] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe07ff70-576b-4a3c-941c-8aa0ed3e921c req-3c83b1ea-1c67-4a2d-8a95-cc4f8e24b15f service nova] Acquiring lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.856488] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778486, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.857839] env[62619]: INFO nova.scheduler.client.report [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Deleted allocations for instance cee0356c-25d7-48ca-be09-16b0e1b56a41 [ 1864.000458] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.000458] env[62619]: DEBUG nova.compute.manager [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Instance network_info: |[{"id": "b52c0c61-cdaa-4ec8-b935-3229b930c548", "address": "fa:16:3e:58:c2:30", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb52c0c61-cd", "ovs_interfaceid": "b52c0c61-cdaa-4ec8-b935-3229b930c548", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1864.000704] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe07ff70-576b-4a3c-941c-8aa0ed3e921c req-3c83b1ea-1c67-4a2d-8a95-cc4f8e24b15f service nova] Acquired lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1864.001030] env[62619]: DEBUG nova.network.neutron [req-fe07ff70-576b-4a3c-941c-8aa0ed3e921c req-3c83b1ea-1c67-4a2d-8a95-cc4f8e24b15f service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Refreshing network 
info cache for port b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1864.002553] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:c2:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b52c0c61-cdaa-4ec8-b935-3229b930c548', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1864.009740] env[62619]: DEBUG oslo.service.loopingcall [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1864.010196] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1864.010937] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27d09c89-b3ee-4873-9610-ab40fca301fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.031485] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1864.031485] env[62619]: value = "task-1778487" [ 1864.031485] env[62619]: _type = "Task" [ 1864.031485] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.039549] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778487, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.075087] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778484, 'name': Rename_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.352178] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778486, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068123} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.352462] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1864.354042] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b91dd3e-8278-4ea9-8c84-c78949ec1a78 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.378281] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] f4e85890-ca7d-45a7-92ff-ab881c21c7ed/f4e85890-ca7d-45a7-92ff-ab881c21c7ed.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1864.378801] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc01871d-9785-49f2-9b23-ed43716c9772 tempest-ListServerFiltersTestJSON-541805745 tempest-ListServerFiltersTestJSON-541805745-project-member] Lock "cee0356c-25d7-48ca-be09-16b0e1b56a41" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.138s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.380360] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4cb0957f-3450-4d80-ac4b-c9e2ca32fea8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.403214] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1864.403214] env[62619]: value = "task-1778488" [ 1864.403214] env[62619]: _type = "Task" [ 1864.403214] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.413741] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778488, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.546870] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778487, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.576875] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778484, 'name': Rename_Task, 'duration_secs': 1.847872} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.576875] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1864.577198] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-115623c8-ba3f-4f20-8120-aa93011edcfd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.584824] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1864.584824] env[62619]: value = "task-1778489" [ 1864.584824] env[62619]: _type = "Task" [ 1864.584824] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.596741] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778489, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.913646] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778488, 'name': ReconfigVM_Task, 'duration_secs': 0.338338} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.913988] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Reconfigured VM instance instance-0000006b to attach disk [datastore1] f4e85890-ca7d-45a7-92ff-ab881c21c7ed/f4e85890-ca7d-45a7-92ff-ab881c21c7ed.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1864.915583] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35579d61-1a9b-470f-ac35-def8c22eccaa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.923320] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1864.923320] env[62619]: value = "task-1778490" [ 1864.923320] env[62619]: _type = "Task" [ 1864.923320] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.926490] env[62619]: DEBUG nova.network.neutron [req-fe07ff70-576b-4a3c-941c-8aa0ed3e921c req-3c83b1ea-1c67-4a2d-8a95-cc4f8e24b15f service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Updated VIF entry in instance network info cache for port b52c0c61-cdaa-4ec8-b935-3229b930c548. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1864.926826] env[62619]: DEBUG nova.network.neutron [req-fe07ff70-576b-4a3c-941c-8aa0ed3e921c req-3c83b1ea-1c67-4a2d-8a95-cc4f8e24b15f service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Updating instance_info_cache with network_info: [{"id": "b52c0c61-cdaa-4ec8-b935-3229b930c548", "address": "fa:16:3e:58:c2:30", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb52c0c61-cd", "ovs_interfaceid": "b52c0c61-cdaa-4ec8-b935-3229b930c548", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1864.935389] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778490, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.050015] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778487, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.098750] env[62619]: DEBUG oslo_vmware.api [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778489, 'name': PowerOnVM_Task, 'duration_secs': 0.479518} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.098750] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1865.098947] env[62619]: INFO nova.compute.manager [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Took 9.81 seconds to spawn the instance on the hypervisor. 
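The nearby entries (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, and the repeated "_poll_task ... progress is N%" lines) all follow the same oslo.vmware pattern: a SOAP task method is invoked through the API session and then polled until it completes. A minimal sketch of that pattern, assuming the standard oslo.vmware session API; the endpoint, credentials, and the vm_ref argument are placeholders, not values taken from this log:

from oslo_vmware import api

# Placeholder vCenter endpoint and credentials (in the log the session is
# created by VMwareAPISession._create_session against a real vCenter).
session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

def power_on(vm_ref):
    # Invoke the SOAP task through the session, as the
    # "Invoking VirtualMachine.PowerOnVM_Task" entries above do.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Block until the task finishes; wait_for_task drives the repeated
    # "_poll_task ... progress is N%" polling and returns the task info
    # once the task reports "completed successfully".
    return session.wait_for_task(task)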
[ 1865.099225] env[62619]: DEBUG nova.compute.manager [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1865.100068] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec495d17-9b42-4d5c-85db-d3b798d72a98 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.117917] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8eb720-0a76-4d74-b81d-1b684e5b2c13 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.126397] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bba4c69-dae7-448d-85b1-cbc2ed9bbd14 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.159963] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d24e9fc9-2d51-4acb-8237-b93e1403ef09 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.169341] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fcd248-e293-4524-b807-889e00cdaee4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.183807] env[62619]: DEBUG nova.compute.provider_tree [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1865.340981] env[62619]: DEBUG nova.network.neutron [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Successfully updated port: ce3520ea-f75e-4d6a-a27f-de90d6383823 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1865.432362] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe07ff70-576b-4a3c-941c-8aa0ed3e921c req-3c83b1ea-1c67-4a2d-8a95-cc4f8e24b15f service nova] Releasing lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1865.432774] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778490, 'name': Rename_Task, 'duration_secs': 0.207389} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.433031] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1865.433288] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64d6cb43-48a7-4f03-add7-bcd83ea35f90 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.439849] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1865.439849] env[62619]: value = "task-1778491" [ 1865.439849] env[62619]: _type = "Task" [ 1865.439849] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.448181] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778491, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.548184] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778487, 'name': CreateVM_Task, 'duration_secs': 1.422707} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.548424] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1865.549423] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.549714] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.550208] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1865.550677] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a1a9b1b-dbf5-4e60-b731-1a3c8afd102f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.559019] 
env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1865.559019] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a55132-331c-d0e8-1131-986beb974975" [ 1865.559019] env[62619]: _type = "Task" [ 1865.559019] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.572570] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a55132-331c-d0e8-1131-986beb974975, 'name': SearchDatastore_Task, 'duration_secs': 0.011999} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.573160] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1865.573552] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1865.573788] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.573935] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.574123] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1865.575353] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73c666d1-b327-449e-a4ad-0f8fbd701693 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.585143] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 
tempest-AttachVolumeShelveTestJSON-739994717-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1865.585143] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1865.585795] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bc239d0-89ac-419e-a7e7-752aecc829d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.591175] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1865.591175] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d69a09-768a-2da4-8b29-2de075ebd5aa" [ 1865.591175] env[62619]: _type = "Task" [ 1865.591175] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.600267] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d69a09-768a-2da4-8b29-2de075ebd5aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.622023] env[62619]: INFO nova.compute.manager [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Took 18.59 seconds to build instance. [ 1865.687362] env[62619]: DEBUG nova.scheduler.client.report [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1865.805114] env[62619]: DEBUG nova.compute.manager [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1865.805996] env[62619]: DEBUG nova.virt.hardware [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1865.806373] env[62619]: DEBUG nova.virt.hardware [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1865.806669] env[62619]: DEBUG nova.virt.hardware [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1865.807016] env[62619]: DEBUG nova.virt.hardware [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1865.807305] env[62619]: DEBUG nova.virt.hardware [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1865.807602] env[62619]: DEBUG nova.virt.hardware [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1865.807932] env[62619]: DEBUG nova.virt.hardware [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1865.808234] env[62619]: DEBUG nova.virt.hardware [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1865.808647] env[62619]: DEBUG nova.virt.hardware [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Got 1 possible topologies 
{{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1865.808978] env[62619]: DEBUG nova.virt.hardware [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1865.809362] env[62619]: DEBUG nova.virt.hardware [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1865.811596] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc37a79-7877-4e27-83b6-7712fb703e89 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.826148] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a990a9e7-a02b-4a21-b62f-c4fd51bd9a02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.850091] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.850411] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.850720] env[62619]: DEBUG nova.network.neutron [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1865.853122] env[62619]: DEBUG oslo_concurrency.lockutils [None req-149c0dbe-323a-4c6f-8c88-888b5556bf36 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "interface-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.853636] env[62619]: DEBUG oslo_concurrency.lockutils [None req-149c0dbe-323a-4c6f-8c88-888b5556bf36 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.854117] env[62619]: DEBUG nova.objects.instance [None req-149c0dbe-323a-4c6f-8c88-888b5556bf36 
tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'flavor' on Instance uuid 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1865.952133] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778491, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.992833] env[62619]: DEBUG nova.compute.manager [req-b92951fc-e7aa-4da7-9d01-9fd5146224c2 req-ac7ec17e-bf13-468c-80b8-b5febb2ae35d service nova] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Received event network-vif-plugged-ce3520ea-f75e-4d6a-a27f-de90d6383823 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1865.993125] env[62619]: DEBUG oslo_concurrency.lockutils [req-b92951fc-e7aa-4da7-9d01-9fd5146224c2 req-ac7ec17e-bf13-468c-80b8-b5febb2ae35d service nova] Acquiring lock "21d9fc7a-228e-4b33-8534-55285d4e6e96-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.994392] env[62619]: DEBUG oslo_concurrency.lockutils [req-b92951fc-e7aa-4da7-9d01-9fd5146224c2 req-ac7ec17e-bf13-468c-80b8-b5febb2ae35d service nova] Lock "21d9fc7a-228e-4b33-8534-55285d4e6e96-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.994392] env[62619]: DEBUG oslo_concurrency.lockutils [req-b92951fc-e7aa-4da7-9d01-9fd5146224c2 req-ac7ec17e-bf13-468c-80b8-b5febb2ae35d service nova] Lock "21d9fc7a-228e-4b33-8534-55285d4e6e96-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.994392] env[62619]: DEBUG nova.compute.manager [req-b92951fc-e7aa-4da7-9d01-9fd5146224c2 req-ac7ec17e-bf13-468c-80b8-b5febb2ae35d service nova] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] No waiting events found dispatching network-vif-plugged-ce3520ea-f75e-4d6a-a27f-de90d6383823 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1865.994392] env[62619]: WARNING nova.compute.manager [req-b92951fc-e7aa-4da7-9d01-9fd5146224c2 req-ac7ec17e-bf13-468c-80b8-b5febb2ae35d service nova] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Received unexpected event network-vif-plugged-ce3520ea-f75e-4d6a-a27f-de90d6383823 for instance with vm_state building and task_state spawning. 
[ 1865.994392] env[62619]: DEBUG nova.compute.manager [req-b92951fc-e7aa-4da7-9d01-9fd5146224c2 req-ac7ec17e-bf13-468c-80b8-b5febb2ae35d service nova] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Received event network-changed-ce3520ea-f75e-4d6a-a27f-de90d6383823 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1865.994392] env[62619]: DEBUG nova.compute.manager [req-b92951fc-e7aa-4da7-9d01-9fd5146224c2 req-ac7ec17e-bf13-468c-80b8-b5febb2ae35d service nova] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Refreshing instance network info cache due to event network-changed-ce3520ea-f75e-4d6a-a27f-de90d6383823. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1865.994392] env[62619]: DEBUG oslo_concurrency.lockutils [req-b92951fc-e7aa-4da7-9d01-9fd5146224c2 req-ac7ec17e-bf13-468c-80b8-b5febb2ae35d service nova] Acquiring lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.104375] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d69a09-768a-2da4-8b29-2de075ebd5aa, 'name': SearchDatastore_Task, 'duration_secs': 0.010373} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.105305] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dbbbd70-9dce-41ef-a854-75a989151af2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.112628] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1866.112628] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529beb80-2d60-933f-a969-1088e6046896" [ 1866.112628] env[62619]: _type = "Task" [ 1866.112628] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.121989] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529beb80-2d60-933f-a969-1088e6046896, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.122823] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e4372714-44e4-4dc0-8458-d4ecac69f502 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.104s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.196035] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.369s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.196448] env[62619]: DEBUG nova.compute.manager [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1866.283144] env[62619]: DEBUG nova.compute.manager [req-85c66313-0326-4f10-9b06-ac21d9d059be req-39a4b6dd-3b96-4347-b3f0-75a949fdc1c0 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Received event network-changed-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1866.283144] env[62619]: DEBUG nova.compute.manager [req-85c66313-0326-4f10-9b06-ac21d9d059be req-39a4b6dd-3b96-4347-b3f0-75a949fdc1c0 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Refreshing instance network info cache due to event network-changed-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1866.283957] env[62619]: DEBUG oslo_concurrency.lockutils [req-85c66313-0326-4f10-9b06-ac21d9d059be req-39a4b6dd-3b96-4347-b3f0-75a949fdc1c0 service nova] Acquiring lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.284304] env[62619]: DEBUG oslo_concurrency.lockutils [req-85c66313-0326-4f10-9b06-ac21d9d059be req-39a4b6dd-3b96-4347-b3f0-75a949fdc1c0 service nova] Acquired lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.284650] env[62619]: DEBUG nova.network.neutron [req-85c66313-0326-4f10-9b06-ac21d9d059be req-39a4b6dd-3b96-4347-b3f0-75a949fdc1c0 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Refreshing network info cache for port b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1866.358860] env[62619]: DEBUG nova.objects.instance [None req-149c0dbe-323a-4c6f-8c88-888b5556bf36 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'pci_requests' on Instance uuid 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1866.403168] env[62619]: DEBUG nova.network.neutron [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1866.450835] env[62619]: DEBUG oslo_vmware.api [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778491, 'name': PowerOnVM_Task, 'duration_secs': 0.736653} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.451355] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1866.451600] env[62619]: INFO nova.compute.manager [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Took 7.15 seconds to spawn the instance on the hypervisor. 
[ 1866.452124] env[62619]: DEBUG nova.compute.manager [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1866.452682] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c08d02f-dbcf-49e7-8e5b-fd6703e3db19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.625715] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529beb80-2d60-933f-a969-1088e6046896, 'name': SearchDatastore_Task, 'duration_secs': 0.011532} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.626622] env[62619]: DEBUG nova.network.neutron [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating instance_info_cache with network_info: [{"id": "ce3520ea-f75e-4d6a-a27f-de90d6383823", "address": "fa:16:3e:90:45:ed", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce3520ea-f7", "ovs_interfaceid": "ce3520ea-f75e-4d6a-a27f-de90d6383823", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.627995] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.628081] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 
13eeb4aa-0f20-4aed-9453-66afb0ff1152/13eeb4aa-0f20-4aed-9453-66afb0ff1152.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1866.628599] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c615e217-2634-4d98-a214-eec5c2a40bd2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.638307] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1866.638307] env[62619]: value = "task-1778492" [ 1866.638307] env[62619]: _type = "Task" [ 1866.638307] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.648148] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778492, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.704815] env[62619]: DEBUG nova.compute.utils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1866.706574] env[62619]: DEBUG nova.compute.manager [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1866.707505] env[62619]: DEBUG nova.network.neutron [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1866.783927] env[62619]: DEBUG nova.policy [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9271ec762cd42168ab2c9957d38eaba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4010737526cd4a3aa36f15a187051010', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1866.861063] env[62619]: DEBUG nova.objects.base [None req-149c0dbe-323a-4c6f-8c88-888b5556bf36 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Object Instance<8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e> lazy-loaded attributes: flavor,pci_requests {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1866.861487] env[62619]: DEBUG nova.network.neutron [None req-149c0dbe-323a-4c6f-8c88-888b5556bf36 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1866.978566] env[62619]: DEBUG oslo_concurrency.lockutils [None req-149c0dbe-323a-4c6f-8c88-888b5556bf36 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.124s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.982947] env[62619]: INFO nova.compute.manager [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Took 18.32 seconds to build instance. 
[ 1867.129398] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.129445] env[62619]: DEBUG nova.compute.manager [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Instance network_info: |[{"id": "ce3520ea-f75e-4d6a-a27f-de90d6383823", "address": "fa:16:3e:90:45:ed", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce3520ea-f7", "ovs_interfaceid": "ce3520ea-f75e-4d6a-a27f-de90d6383823", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1867.129978] env[62619]: DEBUG oslo_concurrency.lockutils [req-b92951fc-e7aa-4da7-9d01-9fd5146224c2 req-ac7ec17e-bf13-468c-80b8-b5febb2ae35d service nova] Acquired lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.129978] env[62619]: DEBUG nova.network.neutron [req-b92951fc-e7aa-4da7-9d01-9fd5146224c2 req-ac7ec17e-bf13-468c-80b8-b5febb2ae35d service nova] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Refreshing network info cache for port ce3520ea-f75e-4d6a-a27f-de90d6383823 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1867.131295] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:45:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8ee8640-3787-4c27-9581-962ddb2be7e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce3520ea-f75e-4d6a-a27f-de90d6383823', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1867.149382] env[62619]: DEBUG oslo.service.loopingcall [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1867.151073] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1867.156377] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5702f034-8af7-41c6-97ca-fa71cab1ebdf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.180029] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778492, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.181965] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1867.181965] env[62619]: value = "task-1778493" [ 1867.181965] env[62619]: _type = "Task" [ 1867.181965] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.192969] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778493, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.210174] env[62619]: DEBUG nova.compute.manager [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1867.442581] env[62619]: DEBUG nova.network.neutron [req-85c66313-0326-4f10-9b06-ac21d9d059be req-39a4b6dd-3b96-4347-b3f0-75a949fdc1c0 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Updated VIF entry in instance network info cache for port b78d1b6e-104b-4041-bcc5-5802f9f6fe3c. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1867.443046] env[62619]: DEBUG nova.network.neutron [req-85c66313-0326-4f10-9b06-ac21d9d059be req-39a4b6dd-3b96-4347-b3f0-75a949fdc1c0 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Updating instance_info_cache with network_info: [{"id": "b78d1b6e-104b-4041-bcc5-5802f9f6fe3c", "address": "fa:16:3e:3b:fe:49", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb78d1b6e-10", "ovs_interfaceid": "b78d1b6e-104b-4041-bcc5-5802f9f6fe3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1867.488587] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2f94950f-7aaf-4d4a-bbb9-77229b7b856f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "f4e85890-ca7d-45a7-92ff-ab881c21c7ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.830s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.591075] env[62619]: DEBUG nova.network.neutron [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Successfully created port: 0de99671-66a1-4b86-9417-2955fdf1dcba {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1867.678686] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778492, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.546017} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.678953] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 13eeb4aa-0f20-4aed-9453-66afb0ff1152/13eeb4aa-0f20-4aed-9453-66afb0ff1152.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1867.679266] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1867.681834] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5fdf4453-b320-4f2c-9b26-672f1935fc4e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.688024] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1867.688024] env[62619]: value = "task-1778494" [ 1867.688024] env[62619]: _type = "Task" [ 1867.688024] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.694861] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778493, 'name': CreateVM_Task, 'duration_secs': 0.42144} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.695507] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1867.696315] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'boot_index': 0, 'guest_format': None, 'mount_device': '/dev/sda', 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369165', 'volume_id': 'd3e2944a-ae36-4f80-82db-68a3de2d143e', 'name': 'volume-d3e2944a-ae36-4f80-82db-68a3de2d143e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '21d9fc7a-228e-4b33-8534-55285d4e6e96', 'attached_at': '', 'detached_at': '', 'volume_id': 'd3e2944a-ae36-4f80-82db-68a3de2d143e', 'serial': 'd3e2944a-ae36-4f80-82db-68a3de2d143e'}, 'attachment_id': '9be2ffbf-70b4-4f3f-b61d-43e3294340c4', 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=62619) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1867.696587] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Root volume attach. Driver type: vmdk {{(pid=62619) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1867.697451] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36579c1-b576-4e8d-a898-89137de02458 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.703225] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778494, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.710611] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44b02ab-12cc-4a86-b1de-91bdd0a140ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.722996] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd98477f-45ff-4322-a442-377f86de18e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.735049] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-c904b228-bfdb-470c-8569-41702932422b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.748916] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1867.748916] env[62619]: value = "task-1778495" [ 1867.748916] env[62619]: _type = "Task" [ 1867.748916] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.763398] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778495, 'name': RelocateVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.945696] env[62619]: DEBUG oslo_concurrency.lockutils [req-85c66313-0326-4f10-9b06-ac21d9d059be req-39a4b6dd-3b96-4347-b3f0-75a949fdc1c0 service nova] Releasing lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.204022] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778494, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.270128} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.204022] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1868.204022] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed718033-d2a2-462e-8c70-26d3533cab4e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.227183] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 13eeb4aa-0f20-4aed-9453-66afb0ff1152/13eeb4aa-0f20-4aed-9453-66afb0ff1152.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1868.228499] env[62619]: DEBUG nova.compute.manager [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1868.230601] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50fa7b23-0e8e-44ea-87e5-c3c4e3107857 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.254794] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1868.254794] env[62619]: value = "task-1778496" [ 1868.254794] env[62619]: _type = "Task" [ 1868.254794] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.262343] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778495, 'name': RelocateVM_Task} progress is 20%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.268398] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778496, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.286288] env[62619]: DEBUG nova.virt.hardware [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1868.286612] env[62619]: DEBUG nova.virt.hardware [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1868.286860] env[62619]: DEBUG nova.virt.hardware [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1868.287958] env[62619]: DEBUG nova.virt.hardware [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1868.288315] env[62619]: DEBUG nova.virt.hardware [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1868.288549] env[62619]: DEBUG nova.virt.hardware [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1868.288858] env[62619]: DEBUG nova.virt.hardware [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1868.289043] env[62619]: DEBUG nova.virt.hardware [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1868.289275] env[62619]: DEBUG nova.virt.hardware [None 
req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1868.289458] env[62619]: DEBUG nova.virt.hardware [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1868.289687] env[62619]: DEBUG nova.virt.hardware [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1868.290738] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ea0031-e933-4c6f-a4c1-b2730b88be8b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.300090] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fbbc6fa-c6d5-48f1-a396-0861ed120385 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.385087] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f276830d-92ea-4d6c-b414-7870c5bde952 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.393430] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4a906e70-2175-4ecc-8848-9c02d7c5bf49 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Suspending the VM {{(pid=62619) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1868.393605] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-394fbe60-d378-4b4f-ba65-f6248419b6e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.402606] env[62619]: DEBUG oslo_vmware.api [None req-4a906e70-2175-4ecc-8848-9c02d7c5bf49 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1868.402606] env[62619]: value = "task-1778497" [ 1868.402606] env[62619]: _type = "Task" [ 1868.402606] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.413432] env[62619]: DEBUG oslo_vmware.api [None req-4a906e70-2175-4ecc-8848-9c02d7c5bf49 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778497, 'name': SuspendVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.651637] env[62619]: DEBUG nova.network.neutron [req-b92951fc-e7aa-4da7-9d01-9fd5146224c2 req-ac7ec17e-bf13-468c-80b8-b5febb2ae35d service nova] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updated VIF entry in instance network info cache for port ce3520ea-f75e-4d6a-a27f-de90d6383823. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1868.652050] env[62619]: DEBUG nova.network.neutron [req-b92951fc-e7aa-4da7-9d01-9fd5146224c2 req-ac7ec17e-bf13-468c-80b8-b5febb2ae35d service nova] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating instance_info_cache with network_info: [{"id": "ce3520ea-f75e-4d6a-a27f-de90d6383823", "address": "fa:16:3e:90:45:ed", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce3520ea-f7", "ovs_interfaceid": "ce3520ea-f75e-4d6a-a27f-de90d6383823", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.769411] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778495, 'name': RelocateVM_Task, 'duration_secs': 0.70257} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.772933] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Volume attach. Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1868.773194] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369165', 'volume_id': 'd3e2944a-ae36-4f80-82db-68a3de2d143e', 'name': 'volume-d3e2944a-ae36-4f80-82db-68a3de2d143e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '21d9fc7a-228e-4b33-8534-55285d4e6e96', 'attached_at': '', 'detached_at': '', 'volume_id': 'd3e2944a-ae36-4f80-82db-68a3de2d143e', 'serial': 'd3e2944a-ae36-4f80-82db-68a3de2d143e'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1868.773528] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778496, 'name': ReconfigVM_Task, 'duration_secs': 0.49703} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.774302] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1aab464-9b03-47d3-ab7a-a956625bdac6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.777360] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 13eeb4aa-0f20-4aed-9453-66afb0ff1152/13eeb4aa-0f20-4aed-9453-66afb0ff1152.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1868.778062] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-adf0f255-f7c1-465e-8073-c07a1bfa0c52 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.798216] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2fc191-14ad-48d5-bad0-a4c5067a63c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.801847] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1868.801847] env[62619]: value = "task-1778498" [ 1868.801847] env[62619]: _type = "Task" [ 1868.801847] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.824957] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] volume-d3e2944a-ae36-4f80-82db-68a3de2d143e/volume-d3e2944a-ae36-4f80-82db-68a3de2d143e.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1868.825837] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d7a5cb6-894a-4167-bab8-ed5a2ca3b0dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.844606] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778498, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.851451] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1868.851451] env[62619]: value = "task-1778499" [ 1868.851451] env[62619]: _type = "Task" [ 1868.851451] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.861415] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778499, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.913873] env[62619]: DEBUG oslo_vmware.api [None req-4a906e70-2175-4ecc-8848-9c02d7c5bf49 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778497, 'name': SuspendVM_Task} progress is 58%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.099809] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "interface-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.100046] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.100384] env[62619]: DEBUG nova.objects.instance [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'flavor' on Instance uuid 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1869.155778] env[62619]: DEBUG oslo_concurrency.lockutils [req-b92951fc-e7aa-4da7-9d01-9fd5146224c2 req-ac7ec17e-bf13-468c-80b8-b5febb2ae35d service nova] Releasing lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.314940] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778498, 'name': Rename_Task, 'duration_secs': 0.249981} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.315254] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1869.315462] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad255cd6-7af3-4ba2-9670-1132f16fb9dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.323172] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1869.323172] env[62619]: value = "task-1778500" [ 1869.323172] env[62619]: _type = "Task" [ 1869.323172] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.334820] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778500, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.361279] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778499, 'name': ReconfigVM_Task, 'duration_secs': 0.451164} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.361605] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Reconfigured VM instance instance-0000006d to attach disk [datastore1] volume-d3e2944a-ae36-4f80-82db-68a3de2d143e/volume-d3e2944a-ae36-4f80-82db-68a3de2d143e.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1869.367965] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff5520e5-edce-4933-a2cb-a3070b85f9cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.384884] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1869.384884] env[62619]: value = "task-1778501" [ 1869.384884] env[62619]: _type = "Task" [ 1869.384884] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.394558] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778501, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.412757] env[62619]: DEBUG oslo_vmware.api [None req-4a906e70-2175-4ecc-8848-9c02d7c5bf49 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778497, 'name': SuspendVM_Task, 'duration_secs': 0.867145} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.413032] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4a906e70-2175-4ecc-8848-9c02d7c5bf49 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Suspended the VM {{(pid=62619) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1869.413219] env[62619]: DEBUG nova.compute.manager [None req-4a906e70-2175-4ecc-8848-9c02d7c5bf49 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1869.414015] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6da92ca-2590-49f6-9c63-7674e84e515d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.723496] env[62619]: DEBUG nova.objects.instance [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'pci_requests' on Instance uuid 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1869.833915] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778500, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.895557] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778501, 'name': ReconfigVM_Task, 'duration_secs': 0.311816} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.895962] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369165', 'volume_id': 'd3e2944a-ae36-4f80-82db-68a3de2d143e', 'name': 'volume-d3e2944a-ae36-4f80-82db-68a3de2d143e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '21d9fc7a-228e-4b33-8534-55285d4e6e96', 'attached_at': '', 'detached_at': '', 'volume_id': 'd3e2944a-ae36-4f80-82db-68a3de2d143e', 'serial': 'd3e2944a-ae36-4f80-82db-68a3de2d143e'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1869.896424] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0339a1a-c9ac-44a0-a6aa-847c3fa621c4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.903757] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1869.903757] env[62619]: value = "task-1778502" [ 1869.903757] env[62619]: _type = "Task" [ 1869.903757] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.912671] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778502, 'name': Rename_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.173069] env[62619]: DEBUG nova.compute.manager [req-a2dc6cbc-00ec-4555-b2cd-f82c80b84a1e req-f7a7e86b-03b0-4d52-90d1-9047ee35ef0e service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Received event network-vif-plugged-0de99671-66a1-4b86-9417-2955fdf1dcba {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1870.173698] env[62619]: DEBUG oslo_concurrency.lockutils [req-a2dc6cbc-00ec-4555-b2cd-f82c80b84a1e req-f7a7e86b-03b0-4d52-90d1-9047ee35ef0e service nova] Acquiring lock "cec0ea75-042d-4ee5-91d5-cad86456ab97-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.173976] env[62619]: DEBUG oslo_concurrency.lockutils [req-a2dc6cbc-00ec-4555-b2cd-f82c80b84a1e req-f7a7e86b-03b0-4d52-90d1-9047ee35ef0e service nova] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.174243] env[62619]: DEBUG oslo_concurrency.lockutils [req-a2dc6cbc-00ec-4555-b2cd-f82c80b84a1e req-f7a7e86b-03b0-4d52-90d1-9047ee35ef0e service nova] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.174465] env[62619]: DEBUG nova.compute.manager [req-a2dc6cbc-00ec-4555-b2cd-f82c80b84a1e req-f7a7e86b-03b0-4d52-90d1-9047ee35ef0e service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] No waiting events found dispatching network-vif-plugged-0de99671-66a1-4b86-9417-2955fdf1dcba {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1870.174673] env[62619]: WARNING nova.compute.manager [req-a2dc6cbc-00ec-4555-b2cd-f82c80b84a1e req-f7a7e86b-03b0-4d52-90d1-9047ee35ef0e service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Received unexpected event network-vif-plugged-0de99671-66a1-4b86-9417-2955fdf1dcba for instance with vm_state building and task_state spawning. 
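The paired "Acquiring lock ... / acquired ... waited N s / ... released ... held N s" DEBUG lines above are emitted by oslo.concurrency's lockutils helpers wrapping a critical section (here the per-instance "-events" lock in InstanceEvents). A minimal sketch of that pattern, assuming only the public lockutils API; the lock name and the guarded bodies are illustrative, not taken from the log:

    # Minimal sketch of the lock pattern behind the "Acquiring lock ... /
    # acquired ... / released ..." DEBUG lines. The lock name and the body
    # of each critical section are illustrative; only the lockutils calls
    # themselves are the real public API.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('cec0ea75-042d-4ee5-91d5-cad86456ab97-events')
    def _pop_event_example(events, name):
        # Runs with the per-instance "-events" lock held, mirroring the
        # pop_instance_event flow logged above.
        return events.pop(name, None)

    # The same guard expressed as a context manager, which produces the
    # identical acquire/release log messages:
    def _store_event_example(events, name, value):
        with lockutils.lock('cec0ea75-042d-4ee5-91d5-cad86456ab97-events'):
            events[name] = value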
[ 1870.191479] env[62619]: DEBUG nova.network.neutron [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Successfully updated port: 0de99671-66a1-4b86-9417-2955fdf1dcba {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1870.211851] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Acquiring lock "b56800e8-1eab-4589-8d09-961f73973981" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.211851] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Lock "b56800e8-1eab-4589-8d09-961f73973981" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.228216] env[62619]: DEBUG nova.objects.base [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Object Instance<8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e> lazy-loaded attributes: flavor,pci_requests {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1870.228216] env[62619]: DEBUG nova.network.neutron [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1870.282839] env[62619]: DEBUG nova.policy [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8d937f303584c3daea133a6283fd5a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23d77e73a09d492695fbfe6ac2c93371', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1870.333713] env[62619]: DEBUG oslo_vmware.api [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778500, 'name': PowerOnVM_Task, 'duration_secs': 0.567334} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.333981] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1870.334382] env[62619]: INFO nova.compute.manager [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Took 8.74 seconds to spawn the instance on the hypervisor. [ 1870.334576] env[62619]: DEBUG nova.compute.manager [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1870.335378] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a5721d-48dc-4f78-b935-8ffa0980b616 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.414459] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778502, 'name': Rename_Task, 'duration_secs': 0.139945} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.414670] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1870.414953] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fac55563-e8af-493c-81cd-01b137b2d934 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.422442] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1870.422442] env[62619]: value = "task-1778503" [ 1870.422442] env[62619]: _type = "Task" [ 1870.422442] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.430852] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778503, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.566834] env[62619]: DEBUG nova.network.neutron [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Successfully created port: e4b11f7c-b59b-4267-a9da-8fbd14f25154 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1870.646091] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "f4e85890-ca7d-45a7-92ff-ab881c21c7ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.646459] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "f4e85890-ca7d-45a7-92ff-ab881c21c7ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.646884] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "f4e85890-ca7d-45a7-92ff-ab881c21c7ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.646983] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "f4e85890-ca7d-45a7-92ff-ab881c21c7ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.647354] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "f4e85890-ca7d-45a7-92ff-ab881c21c7ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.649882] env[62619]: INFO nova.compute.manager [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Terminating instance [ 1870.693878] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "refresh_cache-cec0ea75-042d-4ee5-91d5-cad86456ab97" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.694057] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 
tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquired lock "refresh_cache-cec0ea75-042d-4ee5-91d5-cad86456ab97" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.694223] env[62619]: DEBUG nova.network.neutron [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1870.713215] env[62619]: DEBUG nova.compute.manager [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1870.856931] env[62619]: INFO nova.compute.manager [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Took 17.72 seconds to build instance. [ 1870.932289] env[62619]: DEBUG oslo_vmware.api [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778503, 'name': PowerOnVM_Task, 'duration_secs': 0.502573} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.932589] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1870.932803] env[62619]: INFO nova.compute.manager [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Took 5.13 seconds to spawn the instance on the hypervisor. [ 1870.932981] env[62619]: DEBUG nova.compute.manager [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1870.933746] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db61599-10df-40be-b352-3bb01d66ebc3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.153432] env[62619]: DEBUG nova.compute.manager [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1871.153690] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1871.154619] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546bb010-9087-4f03-9cb1-86b35996551a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.163389] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1871.163667] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7abf41b1-6196-48b3-b809-a92a41b869d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.224944] env[62619]: DEBUG nova.network.neutron [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1871.234434] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.234697] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.236594] env[62619]: INFO nova.compute.claims [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1871.272083] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1871.272328] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Deleting contents of the VM from datastore 
datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1871.272536] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleting the datastore file [datastore1] f4e85890-ca7d-45a7-92ff-ab881c21c7ed {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1871.272844] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04a77867-fb37-4db5-907c-c9eb906f3f8e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.281011] env[62619]: DEBUG oslo_vmware.api [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1871.281011] env[62619]: value = "task-1778505" [ 1871.281011] env[62619]: _type = "Task" [ 1871.281011] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.292220] env[62619]: DEBUG oslo_vmware.api [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778505, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.362312] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1ab6d8fb-25a9-47aa-85e4-1133105ddefe tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.236s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.453865] env[62619]: INFO nova.compute.manager [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Took 15.97 seconds to build instance. 
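The recurring "Waiting for the task ... / Task: {...} progress is N% / completed successfully" lines above all come from the same poll-until-done loop around vCenter tasks. A generic sketch of that control flow; get_task_info() and TaskFailed are hypothetical stand-ins for the real oslo.vmware internals, and only the loop structure is meant to match the log:

    # Generic sketch of the task-polling loop behind the repeated
    # "Task: {...} progress is N%" / "completed successfully" lines.
    # get_task_info() is a hypothetical stand-in for the vSphere property
    # read that the real client performs on each iteration.
    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task_example(get_task_info, poll_interval=0.5):
        while True:
            info = get_task_info()   # e.g. {'state': 'running', 'progress': 58}
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            # 'queued' or 'running': report progress and poll again.
            print("progress is %s%%" % info.get('progress', 0))
            time.sleep(poll_interval)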
[ 1871.542523] env[62619]: DEBUG nova.network.neutron [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Updating instance_info_cache with network_info: [{"id": "0de99671-66a1-4b86-9417-2955fdf1dcba", "address": "fa:16:3e:fc:2a:8e", "network": {"id": "6a1847de-b585-445d-8064-dc33dc365719", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1852054191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4010737526cd4a3aa36f15a187051010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0de99671-66", "ovs_interfaceid": "0de99671-66a1-4b86-9417-2955fdf1dcba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1871.793260] env[62619]: DEBUG oslo_vmware.api [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778505, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216682} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.793592] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1871.793794] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1871.793961] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1871.794144] env[62619]: INFO nova.compute.manager [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Took 0.64 seconds to destroy the instance on the hypervisor. 
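The teardown of instance f4e85890-ca7d-45a7-92ff-ab881c21c7ed above follows a fixed order: unregister the VM from vCenter, delete its datastore directory, and only then deallocate the Neutron ports. A condensed sketch of that ordering; session, neutron and their methods are hypothetical stand-ins, and only the sequence of steps is taken from the log:

    # Condensed sketch of the destroy ordering visible above: unregister
    # the VM, delete its files, then free the network. The objects and
    # method names here are hypothetical stand-ins for the real driver code.
    def destroy_instance_example(session, neutron, vm_ref, datastore_path, instance_uuid):
        # 1. Remove the VM from the vCenter inventory (UnregisterVM above).
        session.call("UnregisterVM", vm_ref)

        # 2. Delete the instance directory on the datastore
        #    (DeleteDatastoreFile_Task above), waiting for the task to finish.
        task = session.call("DeleteDatastoreFile_Task", datastore_path)
        session.wait_for_task(task)

        # 3. Only after the hypervisor-side cleanup, release the ports
        #    ("Deallocating network for instance" above).
        neutron.deallocate_for_instance(instance_uuid)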
[ 1871.794381] env[62619]: DEBUG oslo.service.loopingcall [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1871.794579] env[62619]: DEBUG nova.compute.manager [-] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1871.794675] env[62619]: DEBUG nova.network.neutron [-] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1871.956459] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0082e40c-61fb-4fdb-ac63-639d4e4d75d4 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "21d9fc7a-228e-4b33-8534-55285d4e6e96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.476s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.123668] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Releasing lock "refresh_cache-cec0ea75-042d-4ee5-91d5-cad86456ab97" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.123668] env[62619]: DEBUG nova.compute.manager [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Instance network_info: |[{"id": "0de99671-66a1-4b86-9417-2955fdf1dcba", "address": "fa:16:3e:fc:2a:8e", "network": {"id": "6a1847de-b585-445d-8064-dc33dc365719", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1852054191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4010737526cd4a3aa36f15a187051010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0de99671-66", "ovs_interfaceid": "0de99671-66a1-4b86-9417-2955fdf1dcba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1872.123668] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:fc:2a:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e4c8c8fd-baca-4e60-97dc-ff0418d63215', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0de99671-66a1-4b86-9417-2955fdf1dcba', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1872.123668] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Creating folder: Project (4010737526cd4a3aa36f15a187051010). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1872.123668] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-316ed5dc-958e-44e0-8b16-4cc139d02699 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.123668] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Created folder: Project (4010737526cd4a3aa36f15a187051010) in parent group-v368875. [ 1872.123668] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Creating folder: Instances. Parent ref: group-v369170. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1872.123668] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2666225-5c1d-4a84-b3ab-73b8d9ca17f2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.123668] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Created folder: Instances in parent group-v369170. [ 1872.123668] env[62619]: DEBUG oslo.service.loopingcall [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1872.123668] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1872.123668] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a262dad-e3a9-41df-b7dc-d7fb75fb6d82 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.123668] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1872.123668] env[62619]: value = "task-1778508" [ 1872.123668] env[62619]: _type = "Task" [ 1872.123668] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.123668] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778508, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.239472] env[62619]: DEBUG nova.compute.manager [req-a8d1b959-b025-493e-ae69-ef6b972d267c req-ff22d102-8da7-4a32-8d2e-6636ed0b6d24 service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Received event network-changed-0de99671-66a1-4b86-9417-2955fdf1dcba {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1872.239687] env[62619]: DEBUG nova.compute.manager [req-a8d1b959-b025-493e-ae69-ef6b972d267c req-ff22d102-8da7-4a32-8d2e-6636ed0b6d24 service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Refreshing instance network info cache due to event network-changed-0de99671-66a1-4b86-9417-2955fdf1dcba. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1872.239895] env[62619]: DEBUG oslo_concurrency.lockutils [req-a8d1b959-b025-493e-ae69-ef6b972d267c req-ff22d102-8da7-4a32-8d2e-6636ed0b6d24 service nova] Acquiring lock "refresh_cache-cec0ea75-042d-4ee5-91d5-cad86456ab97" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1872.240371] env[62619]: DEBUG oslo_concurrency.lockutils [req-a8d1b959-b025-493e-ae69-ef6b972d267c req-ff22d102-8da7-4a32-8d2e-6636ed0b6d24 service nova] Acquired lock "refresh_cache-cec0ea75-042d-4ee5-91d5-cad86456ab97" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1872.240596] env[62619]: DEBUG nova.network.neutron [req-a8d1b959-b025-493e-ae69-ef6b972d267c req-ff22d102-8da7-4a32-8d2e-6636ed0b6d24 service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Refreshing network info cache for port 0de99671-66a1-4b86-9417-2955fdf1dcba {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1872.419849] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6191e54e-12f4-442c-a6b2-90b748e12799 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.428317] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9d73a4-41cd-40cc-87ba-64e0e6d8d3b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.460012] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d85f8b-f4d9-4600-b089-08a83d809923 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.468450] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bbab1ca-f9b0-4624-a400-342ffbb8b92b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.483332] env[62619]: DEBUG nova.compute.provider_tree [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1872.615224] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778508, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.986019] env[62619]: DEBUG nova.network.neutron [req-a8d1b959-b025-493e-ae69-ef6b972d267c req-ff22d102-8da7-4a32-8d2e-6636ed0b6d24 service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Updated VIF entry in instance network info cache for port 0de99671-66a1-4b86-9417-2955fdf1dcba. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1872.986019] env[62619]: DEBUG nova.network.neutron [req-a8d1b959-b025-493e-ae69-ef6b972d267c req-ff22d102-8da7-4a32-8d2e-6636ed0b6d24 service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Updating instance_info_cache with network_info: [{"id": "0de99671-66a1-4b86-9417-2955fdf1dcba", "address": "fa:16:3e:fc:2a:8e", "network": {"id": "6a1847de-b585-445d-8064-dc33dc365719", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1852054191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4010737526cd4a3aa36f15a187051010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0de99671-66", "ovs_interfaceid": "0de99671-66a1-4b86-9417-2955fdf1dcba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.986019] env[62619]: DEBUG nova.scheduler.client.report [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1873.081438] env[62619]: DEBUG nova.network.neutron [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Successfully updated port: e4b11f7c-b59b-4267-a9da-8fbd14f25154 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1873.116747] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778508, 'name': CreateVM_Task, 'duration_secs': 0.593719} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.116916] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1873.117610] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.117773] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1873.118504] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1873.118504] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96bfd403-f316-4bc4-9a7f-756673fa30c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.124057] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1873.124057] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529affae-41af-e38a-0792-046925d59898" [ 1873.124057] env[62619]: _type = "Task" [ 1873.124057] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.132361] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529affae-41af-e38a-0792-046925d59898, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.320479] env[62619]: DEBUG nova.network.neutron [-] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1873.486490] env[62619]: DEBUG oslo_concurrency.lockutils [req-a8d1b959-b025-493e-ae69-ef6b972d267c req-ff22d102-8da7-4a32-8d2e-6636ed0b6d24 service nova] Releasing lock "refresh_cache-cec0ea75-042d-4ee5-91d5-cad86456ab97" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1873.490833] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.256s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.491523] env[62619]: DEBUG nova.compute.manager [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1873.584348] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.584596] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1873.584831] env[62619]: DEBUG nova.network.neutron [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1873.635424] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529affae-41af-e38a-0792-046925d59898, 'name': SearchDatastore_Task, 'duration_secs': 0.025107} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.635716] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1873.635953] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1873.636195] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.636340] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1873.636517] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1873.636790] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07c515a6-b176-46ba-8e8e-93c09eceae2a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.645801] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1873.646045] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1873.646884] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-904b1448-5b08-4e66-8e1f-f2e38432fcec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.652506] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1873.652506] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f73283-7060-98e8-4bd3-4a9862085664" [ 1873.652506] env[62619]: _type = "Task" [ 1873.652506] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.661469] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f73283-7060-98e8-4bd3-4a9862085664, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.822821] env[62619]: INFO nova.compute.manager [-] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Took 2.03 seconds to deallocate network for instance. [ 1873.996428] env[62619]: DEBUG nova.compute.utils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1873.997790] env[62619]: DEBUG nova.compute.manager [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1873.997946] env[62619]: DEBUG nova.network.neutron [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1874.044101] env[62619]: DEBUG nova.policy [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd9dee3ada7549978dbc6a20ea6acba6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9466cd9b92754aa2b4f17440d880b592', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1874.121082] env[62619]: WARNING nova.network.neutron [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] ed952a81-cb24-4b52-a137-9ceeefb896cf already exists in list: networks containing: ['ed952a81-cb24-4b52-a137-9ceeefb896cf']. ignoring it [ 1874.167127] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f73283-7060-98e8-4bd3-4a9862085664, 'name': SearchDatastore_Task, 'duration_secs': 0.012761} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.168025] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17cb59b4-618c-46df-9dc4-6e6508ee716f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.174355] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1874.174355] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d4c012-c889-eb38-f11a-17314b1ad61d" [ 1874.174355] env[62619]: _type = "Task" [ 1874.174355] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.181607] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d4c012-c889-eb38-f11a-17314b1ad61d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.298426] env[62619]: DEBUG nova.compute.manager [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Received event network-vif-plugged-e4b11f7c-b59b-4267-a9da-8fbd14f25154 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1874.298651] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] Acquiring lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1874.298988] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] Lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1874.299192] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] Lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.299361] env[62619]: DEBUG nova.compute.manager [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] No waiting events found dispatching network-vif-plugged-e4b11f7c-b59b-4267-a9da-8fbd14f25154 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1874.299829] env[62619]: WARNING nova.compute.manager [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Received unexpected event network-vif-plugged-e4b11f7c-b59b-4267-a9da-8fbd14f25154 for instance with vm_state active and task_state None. [ 1874.300353] env[62619]: DEBUG nova.compute.manager [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Received event network-vif-deleted-7ac08a6c-7d10-4d7e-b077-95ca31ea8a2f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1874.300564] env[62619]: DEBUG nova.compute.manager [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Received event network-changed-e4b11f7c-b59b-4267-a9da-8fbd14f25154 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1874.300741] env[62619]: DEBUG nova.compute.manager [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Refreshing instance network info cache due to event network-changed-e4b11f7c-b59b-4267-a9da-8fbd14f25154. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1874.300978] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] Acquiring lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1874.310281] env[62619]: DEBUG nova.network.neutron [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Successfully created port: 55b17715-2239-4550-9b2c-78d7f930a2fd {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1874.332997] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1874.332997] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1874.332997] env[62619]: DEBUG nova.objects.instance [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lazy-loading 'resources' on Instance uuid f4e85890-ca7d-45a7-92ff-ab881c21c7ed {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1874.504106] env[62619]: DEBUG nova.compute.manager [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1874.634951] env[62619]: DEBUG nova.network.neutron [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Updating instance_info_cache with network_info: [{"id": "0364c1be-595c-4984-9173-39fd5163c9ad", "address": "fa:16:3e:d1:3c:33", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0364c1be-59", "ovs_interfaceid": "0364c1be-595c-4984-9173-39fd5163c9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e4b11f7c-b59b-4267-a9da-8fbd14f25154", "address": "fa:16:3e:57:cb:7b", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4b11f7c-b5", "ovs_interfaceid": "e4b11f7c-b59b-4267-a9da-8fbd14f25154", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.688937] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d4c012-c889-eb38-f11a-17314b1ad61d, 'name': SearchDatastore_Task, 'duration_secs': 0.030748} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.688937] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1874.688937] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] cec0ea75-042d-4ee5-91d5-cad86456ab97/cec0ea75-042d-4ee5-91d5-cad86456ab97.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1874.688937] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bab15451-a427-4f76-a6e3-b1762931ac00 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.695798] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1874.695798] env[62619]: value = "task-1778509" [ 1874.695798] env[62619]: _type = "Task" [ 1874.695798] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.708617] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778509, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.711078] env[62619]: DEBUG nova.compute.manager [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Stashing vm_state: active {{(pid=62619) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1875.013355] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab32cd8-2083-4320-922e-79447d1e4a97 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.021166] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c21cdf0-fed8-4020-beee-67765daa9e47 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.052267] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be38bfe5-91aa-464f-b53a-2e53ce454bf9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.060685] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f6818a-ad74-4533-b781-925cfb689e94 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.076626] env[62619]: DEBUG nova.compute.provider_tree [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1875.137219] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1875.138068] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1875.138368] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1875.138733] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] Acquired lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1875.139010] env[62619]: DEBUG nova.network.neutron [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 
8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Refreshing network info cache for port e4b11f7c-b59b-4267-a9da-8fbd14f25154 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1875.140977] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef1d23c-1983-4ce3-8d79-ca791300fc62 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.159186] env[62619]: DEBUG nova.virt.hardware [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1875.159420] env[62619]: DEBUG nova.virt.hardware [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1875.159601] env[62619]: DEBUG nova.virt.hardware [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1875.159815] env[62619]: DEBUG nova.virt.hardware [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1875.159984] env[62619]: DEBUG nova.virt.hardware [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1875.160157] env[62619]: DEBUG nova.virt.hardware [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1875.160376] env[62619]: DEBUG nova.virt.hardware [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1875.160522] env[62619]: DEBUG nova.virt.hardware [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 
tempest-AttachInterfacesTestJSON-654412364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1875.160710] env[62619]: DEBUG nova.virt.hardware [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1875.160872] env[62619]: DEBUG nova.virt.hardware [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1875.161051] env[62619]: DEBUG nova.virt.hardware [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1875.167183] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Reconfiguring VM to attach interface {{(pid=62619) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1875.167718] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7768379d-1dbe-4953-840d-1a9c423f1231 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.186239] env[62619]: DEBUG oslo_vmware.api [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1875.186239] env[62619]: value = "task-1778510" [ 1875.186239] env[62619]: _type = "Task" [ 1875.186239] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.194269] env[62619]: DEBUG oslo_vmware.api [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778510, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.204803] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778509, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.231462] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.513178] env[62619]: DEBUG nova.compute.manager [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1875.542558] env[62619]: DEBUG nova.virt.hardware [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1875.542887] env[62619]: DEBUG nova.virt.hardware [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1875.543085] env[62619]: DEBUG nova.virt.hardware [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1875.543282] env[62619]: DEBUG nova.virt.hardware [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1875.543448] env[62619]: DEBUG nova.virt.hardware [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1875.543599] env[62619]: DEBUG nova.virt.hardware [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1875.543819] env[62619]: DEBUG nova.virt.hardware [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1875.544252] env[62619]: DEBUG nova.virt.hardware [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1875.544252] env[62619]: DEBUG nova.virt.hardware [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1875.544347] env[62619]: DEBUG nova.virt.hardware [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1875.544554] env[62619]: DEBUG nova.virt.hardware [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1875.545545] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63306c1e-27e8-4b0c-bc27-f80dd3f0aa5d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.554659] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127d1f62-bdad-412d-90bb-0e33a9ab9e1c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.579998] env[62619]: DEBUG nova.scheduler.client.report [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1875.696208] env[62619]: DEBUG oslo_vmware.api [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778510, 'name': ReconfigVM_Task} 
progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.705149] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778509, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.933065} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.705389] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] cec0ea75-042d-4ee5-91d5-cad86456ab97/cec0ea75-042d-4ee5-91d5-cad86456ab97.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1875.705604] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1875.706069] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b270c57f-4aaf-47f8-bac5-28daf496071a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.712597] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1875.712597] env[62619]: value = "task-1778511" [ 1875.712597] env[62619]: _type = "Task" [ 1875.712597] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.720952] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778511, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.860158] env[62619]: DEBUG nova.network.neutron [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Updated VIF entry in instance network info cache for port e4b11f7c-b59b-4267-a9da-8fbd14f25154. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1875.860158] env[62619]: DEBUG nova.network.neutron [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Updating instance_info_cache with network_info: [{"id": "0364c1be-595c-4984-9173-39fd5163c9ad", "address": "fa:16:3e:d1:3c:33", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0364c1be-59", "ovs_interfaceid": "0364c1be-595c-4984-9173-39fd5163c9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e4b11f7c-b59b-4267-a9da-8fbd14f25154", "address": "fa:16:3e:57:cb:7b", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4b11f7c-b5", "ovs_interfaceid": "e4b11f7c-b59b-4267-a9da-8fbd14f25154", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1876.015450] env[62619]: DEBUG nova.network.neutron [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Successfully updated port: 55b17715-2239-4550-9b2c-78d7f930a2fd {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1876.084577] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.752s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.087612] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.855s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.106029] env[62619]: INFO nova.scheduler.client.report [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleted allocations for instance f4e85890-ca7d-45a7-92ff-ab881c21c7ed [ 1876.196945] env[62619]: DEBUG oslo_vmware.api [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778510, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.222205] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778511, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064735} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.222487] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1876.223275] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b188fb8-3dd5-4a2b-89b4-239c521448b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.245260] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] cec0ea75-042d-4ee5-91d5-cad86456ab97/cec0ea75-042d-4ee5-91d5-cad86456ab97.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1876.245552] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2635c2b2-4835-4ba1-90e5-29ea9f59e68e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.265043] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1876.265043] env[62619]: value = "task-1778512" [ 1876.265043] env[62619]: _type = "Task" [ 1876.265043] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.273098] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778512, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.325847] env[62619]: DEBUG nova.compute.manager [req-c5dadd04-4d35-4a8a-b982-3c0b231d3c37 req-04c05ca6-782a-41a7-9203-577fa516149f service nova] [instance: b56800e8-1eab-4589-8d09-961f73973981] Received event network-vif-plugged-55b17715-2239-4550-9b2c-78d7f930a2fd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1876.326071] env[62619]: DEBUG oslo_concurrency.lockutils [req-c5dadd04-4d35-4a8a-b982-3c0b231d3c37 req-04c05ca6-782a-41a7-9203-577fa516149f service nova] Acquiring lock "b56800e8-1eab-4589-8d09-961f73973981-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.326330] env[62619]: DEBUG oslo_concurrency.lockutils [req-c5dadd04-4d35-4a8a-b982-3c0b231d3c37 req-04c05ca6-782a-41a7-9203-577fa516149f service nova] Lock "b56800e8-1eab-4589-8d09-961f73973981-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.326506] env[62619]: DEBUG oslo_concurrency.lockutils [req-c5dadd04-4d35-4a8a-b982-3c0b231d3c37 req-04c05ca6-782a-41a7-9203-577fa516149f service nova] Lock "b56800e8-1eab-4589-8d09-961f73973981-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.326667] env[62619]: DEBUG nova.compute.manager [req-c5dadd04-4d35-4a8a-b982-3c0b231d3c37 req-04c05ca6-782a-41a7-9203-577fa516149f service nova] [instance: b56800e8-1eab-4589-8d09-961f73973981] No waiting events found dispatching network-vif-plugged-55b17715-2239-4550-9b2c-78d7f930a2fd {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1876.326862] env[62619]: WARNING nova.compute.manager [req-c5dadd04-4d35-4a8a-b982-3c0b231d3c37 req-04c05ca6-782a-41a7-9203-577fa516149f service nova] [instance: b56800e8-1eab-4589-8d09-961f73973981] Received unexpected event network-vif-plugged-55b17715-2239-4550-9b2c-78d7f930a2fd for instance with vm_state building and task_state spawning. [ 1876.327053] env[62619]: DEBUG nova.compute.manager [req-c5dadd04-4d35-4a8a-b982-3c0b231d3c37 req-04c05ca6-782a-41a7-9203-577fa516149f service nova] [instance: b56800e8-1eab-4589-8d09-961f73973981] Received event network-changed-55b17715-2239-4550-9b2c-78d7f930a2fd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1876.327212] env[62619]: DEBUG nova.compute.manager [req-c5dadd04-4d35-4a8a-b982-3c0b231d3c37 req-04c05ca6-782a-41a7-9203-577fa516149f service nova] [instance: b56800e8-1eab-4589-8d09-961f73973981] Refreshing instance network info cache due to event network-changed-55b17715-2239-4550-9b2c-78d7f930a2fd. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1876.327387] env[62619]: DEBUG oslo_concurrency.lockutils [req-c5dadd04-4d35-4a8a-b982-3c0b231d3c37 req-04c05ca6-782a-41a7-9203-577fa516149f service nova] Acquiring lock "refresh_cache-b56800e8-1eab-4589-8d09-961f73973981" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1876.327518] env[62619]: DEBUG oslo_concurrency.lockutils [req-c5dadd04-4d35-4a8a-b982-3c0b231d3c37 req-04c05ca6-782a-41a7-9203-577fa516149f service nova] Acquired lock "refresh_cache-b56800e8-1eab-4589-8d09-961f73973981" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1876.327667] env[62619]: DEBUG nova.network.neutron [req-c5dadd04-4d35-4a8a-b982-3c0b231d3c37 req-04c05ca6-782a-41a7-9203-577fa516149f service nova] [instance: b56800e8-1eab-4589-8d09-961f73973981] Refreshing network info cache for port 55b17715-2239-4550-9b2c-78d7f930a2fd {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1876.362453] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] Releasing lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1876.362689] env[62619]: DEBUG nova.compute.manager [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Received event network-changed-28e9da04-af12-4a21-b4ee-408c492669ef {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1876.362866] env[62619]: DEBUG nova.compute.manager [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Refreshing instance network info cache due to event network-changed-28e9da04-af12-4a21-b4ee-408c492669ef. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1876.363076] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] Acquiring lock "refresh_cache-006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1876.363221] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] Acquired lock "refresh_cache-006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1876.363383] env[62619]: DEBUG nova.network.neutron [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Refreshing network info cache for port 28e9da04-af12-4a21-b4ee-408c492669ef {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1876.519861] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Acquiring lock "refresh_cache-b56800e8-1eab-4589-8d09-961f73973981" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1876.591863] env[62619]: INFO nova.compute.claims [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1876.614016] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf7004c9-9153-4566-a940-bc186d81d80d tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "f4e85890-ca7d-45a7-92ff-ab881c21c7ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.968s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.698668] env[62619]: DEBUG oslo_vmware.api [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778510, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.709251] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1876.709505] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1876.774659] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778512, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.871792] env[62619]: DEBUG nova.network.neutron [req-c5dadd04-4d35-4a8a-b982-3c0b231d3c37 req-04c05ca6-782a-41a7-9203-577fa516149f service nova] [instance: b56800e8-1eab-4589-8d09-961f73973981] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1876.986521] env[62619]: DEBUG nova.network.neutron [req-c5dadd04-4d35-4a8a-b982-3c0b231d3c37 req-04c05ca6-782a-41a7-9203-577fa516149f service nova] [instance: b56800e8-1eab-4589-8d09-961f73973981] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.097952] env[62619]: INFO nova.compute.resource_tracker [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating resource usage from migration 10114fb1-8a4f-4bb5-b620-f74b7b500b7d [ 1877.101334] env[62619]: DEBUG nova.network.neutron [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Updated VIF entry in instance network info cache for port 28e9da04-af12-4a21-b4ee-408c492669ef. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1877.101575] env[62619]: DEBUG nova.network.neutron [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Updating instance_info_cache with network_info: [{"id": "28e9da04-af12-4a21-b4ee-408c492669ef", "address": "fa:16:3e:00:1b:cf", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28e9da04-af", "ovs_interfaceid": "28e9da04-af12-4a21-b4ee-408c492669ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.197675] env[62619]: DEBUG oslo_vmware.api [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778510, 'name': ReconfigVM_Task, 'duration_secs': 1.783581} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.198198] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1877.198488] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Reconfigured VM to attach interface {{(pid=62619) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1877.276029] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778512, 'name': ReconfigVM_Task, 'duration_secs': 0.993731} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.277088] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Reconfigured VM instance instance-0000006e to attach disk [datastore1] cec0ea75-042d-4ee5-91d5-cad86456ab97/cec0ea75-042d-4ee5-91d5-cad86456ab97.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1877.278153] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370a97b3-316e-40fd-8274-729862ab7843 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.280544] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1cb8347-7199-4fe7-9a6a-61feb315e4a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.286675] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0480cb0d-f67e-4847-9cb3-cb4979112c1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.290561] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1877.290561] env[62619]: value = "task-1778513" [ 1877.290561] env[62619]: _type = "Task" [ 1877.290561] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.318947] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfc85fb-4e49-4eb5-b27c-d4b17f58c9c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.324659] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778513, 'name': Rename_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.329577] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b3a4d5-2149-41e8-9d96-211d3e0ba64a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.342635] env[62619]: DEBUG nova.compute.provider_tree [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1877.489861] env[62619]: DEBUG oslo_concurrency.lockutils [req-c5dadd04-4d35-4a8a-b982-3c0b231d3c37 req-04c05ca6-782a-41a7-9203-577fa516149f service nova] Releasing lock "refresh_cache-b56800e8-1eab-4589-8d09-961f73973981" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1877.490286] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Acquired lock "refresh_cache-b56800e8-1eab-4589-8d09-961f73973981" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.490450] env[62619]: DEBUG nova.network.neutron [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1877.607465] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] Releasing lock "refresh_cache-006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1877.607850] env[62619]: DEBUG nova.compute.manager [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Received event network-changed-b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1877.608098] env[62619]: DEBUG nova.compute.manager [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Refreshing instance network info cache due to event network-changed-b52c0c61-cdaa-4ec8-b935-3229b930c548. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1877.608509] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] Acquiring lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.608729] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] Acquired lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.608915] env[62619]: DEBUG nova.network.neutron [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Refreshing network info cache for port b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1877.705669] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e34fdeb3-a591-44f7-88ab-fd4aa9e6a54a tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.605s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.800759] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778513, 'name': Rename_Task, 'duration_secs': 0.155888} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.801090] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1877.801366] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ad8f755-ae54-404b-bb33-6db7d5300709 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.808806] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1877.808806] env[62619]: value = "task-1778514" [ 1877.808806] env[62619]: _type = "Task" [ 1877.808806] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.816923] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778514, 'name': PowerOnVM_Task} progress is 0%. 
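The Acquiring/Acquired/Releasing lines above (e.g. "interface-8616e7b2-...-None" released after being held 8.605s, or the per-instance "refresh_cache-<uuid>" locks) are emitted by oslo.concurrency's lock helpers. A minimal sketch of the two usual entry points, assuming only that oslo.concurrency is installed; the lock names and function bodies are illustrative, not Nova's:

    from oslo_concurrency import lockutils

    # Decorator form: serializes every call that shares the same lock name,
    # and logs the acquired/waited/held timings seen above.
    @lockutils.synchronized('interface-<instance-uuid>-<port-id>')
    def do_attach_interface():
        # critical section: reconfigure the VM to attach the interface
        pass

    # Context-manager form: explicit acquire/release, as in the
    # "refresh_cache-<uuid>" records above.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            # rebuild the instance network info cache while holding the lock
            pass
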
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.846804] env[62619]: DEBUG nova.scheduler.client.report [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1878.022777] env[62619]: DEBUG nova.network.neutron [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1878.141722] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.141722] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.172618] env[62619]: DEBUG nova.network.neutron [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Updating instance_info_cache with network_info: [{"id": "55b17715-2239-4550-9b2c-78d7f930a2fd", "address": "fa:16:3e:b6:46:a9", "network": {"id": "32c33e5b-d016-4c51-8a7a-66ca1279b5b6", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-705226394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9466cd9b92754aa2b4f17440d880b592", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55b17715-22", "ovs_interfaceid": "55b17715-2239-4550-9b2c-78d7f930a2fd", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.319312] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778514, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.327184] env[62619]: DEBUG nova.network.neutron [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Updated VIF entry in instance network info cache for port b52c0c61-cdaa-4ec8-b935-3229b930c548. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1878.327556] env[62619]: DEBUG nova.network.neutron [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Updating instance_info_cache with network_info: [{"id": "b52c0c61-cdaa-4ec8-b935-3229b930c548", "address": "fa:16:3e:58:c2:30", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb52c0c61-cd", "ovs_interfaceid": "b52c0c61-cdaa-4ec8-b935-3229b930c548", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.351404] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.264s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.351404] env[62619]: INFO nova.compute.manager [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Migrating [ 1878.646462] env[62619]: DEBUG nova.compute.manager [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1878.675342] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Releasing lock "refresh_cache-b56800e8-1eab-4589-8d09-961f73973981" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.675664] env[62619]: DEBUG nova.compute.manager [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Instance network_info: |[{"id": "55b17715-2239-4550-9b2c-78d7f930a2fd", "address": "fa:16:3e:b6:46:a9", "network": {"id": "32c33e5b-d016-4c51-8a7a-66ca1279b5b6", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-705226394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9466cd9b92754aa2b4f17440d880b592", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55b17715-22", "ovs_interfaceid": "55b17715-2239-4550-9b2c-78d7f930a2fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1878.676119] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:46:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55b17715-2239-4550-9b2c-78d7f930a2fd', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1878.683645] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Creating folder: Project (9466cd9b92754aa2b4f17440d880b592). Parent ref: group-v368875. 
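The spawn path above assembles the VIF info, then creates a per-project folder and an Instances folder before issuing CreateVM_Task. A sketch of the create-folder-if-missing step; the vcenter helper object and its list_folders/create_folder methods are hypothetical stand-ins for the vSphere calls (ChildEntity traversal and Folder.CreateFolder) shown in the log, not the real nova.virt.vmwareapi.vm_util API:

    def get_or_create_folder(vcenter, parent_ref, name):
        # Assumed helpers: list_folders yields (ref, name) pairs under parent_ref,
        # create_folder wraps Folder.CreateFolder.
        for ref, folder_name in vcenter.list_folders(parent_ref):
            if folder_name == name:
                return ref                       # "already exists" path
        return vcenter.create_folder(parent_ref, name)

    def build_instance_folders(vcenter, root_ref, project_id):
        project = get_or_create_folder(vcenter, root_ref, 'Project (%s)' % project_id)
        return get_or_create_folder(vcenter, project, 'Instances')
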
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1878.684572] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8036c66b-dcf2-4cfb-bd24-857677dbd8df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.696834] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Created folder: Project (9466cd9b92754aa2b4f17440d880b592) in parent group-v368875. [ 1878.697133] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Creating folder: Instances. Parent ref: group-v369173. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1878.697392] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d62bf49-2fdc-4618-ad60-f06fb27f42e8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.707797] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Created folder: Instances in parent group-v369173. [ 1878.708090] env[62619]: DEBUG oslo.service.loopingcall [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1878.708326] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b56800e8-1eab-4589-8d09-961f73973981] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1878.708578] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4714326c-4e73-44c9-bd5e-e538cbce406b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.729256] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1878.729256] env[62619]: value = "task-1778517" [ 1878.729256] env[62619]: _type = "Task" [ 1878.729256] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.736468] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778517, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.821961] env[62619]: DEBUG oslo_vmware.api [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778514, 'name': PowerOnVM_Task, 'duration_secs': 0.766549} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.822143] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1878.822431] env[62619]: INFO nova.compute.manager [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Took 10.59 seconds to spawn the instance on the hypervisor. [ 1878.822730] env[62619]: DEBUG nova.compute.manager [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1878.824027] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11f90e8-ba49-49d0-9aa2-f23460c0cbf6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.831033] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] Releasing lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.831303] env[62619]: DEBUG nova.compute.manager [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Received event network-changed-ce3520ea-f75e-4d6a-a27f-de90d6383823 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1878.831471] env[62619]: DEBUG nova.compute.manager [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Refreshing instance network info cache due to event network-changed-ce3520ea-f75e-4d6a-a27f-de90d6383823. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1878.831676] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] Acquiring lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.831815] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] Acquired lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.831973] env[62619]: DEBUG nova.network.neutron [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Refreshing network info cache for port ce3520ea-f75e-4d6a-a27f-de90d6383823 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1878.866542] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.170422] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.170820] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1879.172434] env[62619]: INFO nova.compute.claims [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1879.246311] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778517, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.346134] env[62619]: INFO nova.compute.manager [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Took 16.93 seconds to build instance. [ 1879.557308] env[62619]: DEBUG nova.network.neutron [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updated VIF entry in instance network info cache for port ce3520ea-f75e-4d6a-a27f-de90d6383823. 
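The "Received event network-changed-<port>" / "Refreshing network info cache" pairs above follow one pattern: take the per-instance refresh_cache lock, re-read the changed port from Neutron, and update the cached entry. A condensed sketch under those assumptions; show_port is the python-neutronclient call, everything else (the cache dict, field mapping) is illustrative:

    from oslo_concurrency import lockutils

    def handle_network_changed(neutron_client, cache, instance_uuid, port_id):
        # Mirrors the log flow: acquire "refresh_cache-<uuid>", refresh the one
        # port that changed, write it back, release the lock.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            port = neutron_client.show_port(port_id)['port']
            vifs = cache.get(instance_uuid, [])
            for vif in vifs:
                if vif['id'] == port_id:
                    vif['address'] = port['mac_address']
                    vif['active'] = (port['status'] == 'ACTIVE')
                    break
            cache[instance_uuid] = vifs
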
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1879.557687] env[62619]: DEBUG nova.network.neutron [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating instance_info_cache with network_info: [{"id": "ce3520ea-f75e-4d6a-a27f-de90d6383823", "address": "fa:16:3e:90:45:ed", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce3520ea-f7", "ovs_interfaceid": "ce3520ea-f75e-4d6a-a27f-de90d6383823", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.723631] env[62619]: DEBUG oslo_concurrency.lockutils [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "interface-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-e84ee31b-42f6-44bd-be17-381a0796e37b" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.724029] env[62619]: DEBUG oslo_concurrency.lockutils [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-e84ee31b-42f6-44bd-be17-381a0796e37b" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1879.724244] env[62619]: DEBUG nova.objects.instance [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'flavor' on Instance uuid 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1879.739010] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778517, 'name': CreateVM_Task, 'duration_secs': 0.676797} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.740337] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b56800e8-1eab-4589-8d09-961f73973981] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1879.740958] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.741132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.741433] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1879.741881] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2c35f23-ea16-46a0-bc30-40a3a41ffbfe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.746859] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Waiting for the task: (returnval){ [ 1879.746859] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5280351f-6c1f-3977-0e58-f9f8bb416e71" [ 1879.746859] env[62619]: _type = "Task" [ 1879.746859] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.755354] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5280351f-6c1f-3977-0e58-f9f8bb416e71, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.848058] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d9cd07b-cdee-41fb-a992-b666c70e3341 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.440s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.060224] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b71bc9e-e5f4-4160-a64e-745097c9f41d req-50f85a45-4749-49db-b227-c03dbc3162cb service nova] Releasing lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.060798] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.061012] env[62619]: DEBUG nova.network.neutron [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1880.258461] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5280351f-6c1f-3977-0e58-f9f8bb416e71, 'name': SearchDatastore_Task, 'duration_secs': 0.010626} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.262026] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.262026] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1880.262026] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1880.262026] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.262026] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1880.264705] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b761d4f5-6dac-45d9-bba2-5247e3b39e27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.277016] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1880.277016] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Folder [datastore1] devstack-image-cache_base created. 
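The records around here show the datastore image-cache path: search devstack-image-cache_base for the cached VMDK, create the directory if needed, copy the cached disk into the instance folder, then extend the root disk (here to 1048576 KB). A high-level sketch of that fetch-if-missing sequence; ds is a hypothetical datastore helper standing in for the SearchDatastore_Task / MakeDirectory / CopyVirtualDisk_Task / ExtendVirtualDisk_Task calls, not the real ds_util/vm_util API:

    def prepare_root_disk(ds, image_id, instance_uuid, root_size_kb):
        cache_dir = '[datastore1] devstack-image-cache_base'
        cached = '%s/%s/%s.vmdk' % (cache_dir, image_id, image_id)
        target = '[datastore1] %s/%s.vmdk' % (instance_uuid, instance_uuid)

        ds.mkdir(cache_dir)                      # MakeDirectory (idempotent here)
        if not ds.file_exists(cached):           # SearchDatastore_Task
            ds.fetch_image(image_id, cached)     # pull from Glance (not shown in this log)
        ds.copy_virtual_disk(cached, target)     # CopyVirtualDisk_Task
        if ds.disk_size_kb(target) < root_size_kb:
            ds.extend_virtual_disk(target, root_size_kb)   # ExtendVirtualDisk_Task
        return target
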
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1880.277016] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5fb6fad-d256-459f-b378-13cf104b4f20 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.283704] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Waiting for the task: (returnval){ [ 1880.283704] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c507dd-94bf-a165-35ff-390ba962ea51" [ 1880.283704] env[62619]: _type = "Task" [ 1880.283704] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.296397] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c507dd-94bf-a165-35ff-390ba962ea51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.371542] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8530c8c-af4b-4357-b322-92720bea6e23 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.379722] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2ca711-6722-42a7-9159-00811cdfccdb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.411576] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5682a9f-d605-4400-a790-1500163db422 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.419535] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d260463b-546d-4657-b418-870d756836ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.433542] env[62619]: DEBUG nova.compute.provider_tree [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1880.460417] env[62619]: DEBUG nova.objects.instance [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'pci_requests' on Instance uuid 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1880.709166] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1880.797102] env[62619]: DEBUG 
oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52c507dd-94bf-a165-35ff-390ba962ea51, 'name': SearchDatastore_Task, 'duration_secs': 0.020417} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.798134] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7343b941-e472-4bbe-99c3-7f16acd0cf4a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.803830] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Waiting for the task: (returnval){ [ 1880.803830] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b73358-dee5-1839-a029-5c4062867072" [ 1880.803830] env[62619]: _type = "Task" [ 1880.803830] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.811914] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b73358-dee5-1839-a029-5c4062867072, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.936916] env[62619]: DEBUG nova.scheduler.client.report [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1880.962310] env[62619]: DEBUG nova.objects.base [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Object Instance<8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e> lazy-loaded attributes: flavor,pci_requests {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1880.962594] env[62619]: DEBUG nova.network.neutron [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1881.050584] env[62619]: DEBUG nova.policy [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8d937f303584c3daea133a6283fd5a2', 
'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23d77e73a09d492695fbfe6ac2c93371', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1881.130577] env[62619]: DEBUG nova.network.neutron [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating instance_info_cache with network_info: [{"id": "ce3520ea-f75e-4d6a-a27f-de90d6383823", "address": "fa:16:3e:90:45:ed", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce3520ea-f7", "ovs_interfaceid": "ce3520ea-f75e-4d6a-a27f-de90d6383823", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1881.154971] env[62619]: DEBUG nova.compute.manager [req-6e6b52fc-d223-4fff-9274-7f4a41d688bd req-08ab8f97-dce9-4900-acdf-883e551560fc service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Received event network-changed-0de99671-66a1-4b86-9417-2955fdf1dcba {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1881.155461] env[62619]: DEBUG nova.compute.manager [req-6e6b52fc-d223-4fff-9274-7f4a41d688bd req-08ab8f97-dce9-4900-acdf-883e551560fc service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Refreshing instance network info cache due to event network-changed-0de99671-66a1-4b86-9417-2955fdf1dcba. 
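The "Policy check for network:attach_external_network failed with credentials {...}" lines above are a soft oslo.policy check: a member/reader token is not allowed to attach external networks, so the check returns False and the build continues on a tenant network. A minimal sketch of that kind of non-raising check; the rule name comes from the log, while the Enforcer wiring, default rule string and empty target are illustrative assumptions:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    def may_attach_external_network(credentials):
        # do_raise=False turns a denial into a boolean instead of an exception,
        # which is what lets the caller log "failed" and keep going.
        return enforcer.enforce('network:attach_external_network',
                                {}, credentials, do_raise=False)
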
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1881.155461] env[62619]: DEBUG oslo_concurrency.lockutils [req-6e6b52fc-d223-4fff-9274-7f4a41d688bd req-08ab8f97-dce9-4900-acdf-883e551560fc service nova] Acquiring lock "refresh_cache-cec0ea75-042d-4ee5-91d5-cad86456ab97" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.155753] env[62619]: DEBUG oslo_concurrency.lockutils [req-6e6b52fc-d223-4fff-9274-7f4a41d688bd req-08ab8f97-dce9-4900-acdf-883e551560fc service nova] Acquired lock "refresh_cache-cec0ea75-042d-4ee5-91d5-cad86456ab97" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.155835] env[62619]: DEBUG nova.network.neutron [req-6e6b52fc-d223-4fff-9274-7f4a41d688bd req-08ab8f97-dce9-4900-acdf-883e551560fc service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Refreshing network info cache for port 0de99671-66a1-4b86-9417-2955fdf1dcba {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1881.212850] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.325351] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b73358-dee5-1839-a029-5c4062867072, 'name': SearchDatastore_Task, 'duration_secs': 0.008763} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.325626] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.325882] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] b56800e8-1eab-4589-8d09-961f73973981/b56800e8-1eab-4589-8d09-961f73973981.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1881.326158] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32b9964a-cb6a-4c13-83dd-03b5c88dd225 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.333382] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Waiting for the task: (returnval){ [ 1881.333382] env[62619]: value = "task-1778518" [ 1881.333382] env[62619]: _type = "Task" [ 1881.333382] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.341047] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': task-1778518, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.442429] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.271s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.442892] env[62619]: DEBUG nova.compute.manager [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1881.445460] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.233s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.445645] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.445790] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1881.447336] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd97d801-665d-4fe8-8694-446af0bbb48d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.456780] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fab8011-f981-40c7-baa3-6be51a6a0594 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.473708] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56bc4ef0-61d2-4d80-8790-5bdb73f887f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.488030] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a022844-a5f1-45a0-9336-880890300f77 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.527823] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179793MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1881.528012] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.528291] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.633809] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.843243] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': task-1778518, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.410051} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.843575] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] b56800e8-1eab-4589-8d09-961f73973981/b56800e8-1eab-4589-8d09-961f73973981.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1881.843707] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1881.843957] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-72b0f601-1d31-4067-8648-501ba376ff23 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.851327] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Waiting for the task: (returnval){ [ 1881.851327] env[62619]: value = "task-1778519" [ 1881.851327] env[62619]: _type = "Task" [ 1881.851327] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.861204] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': task-1778519, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.894222] env[62619]: DEBUG nova.network.neutron [req-6e6b52fc-d223-4fff-9274-7f4a41d688bd req-08ab8f97-dce9-4900-acdf-883e551560fc service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Updated VIF entry in instance network info cache for port 0de99671-66a1-4b86-9417-2955fdf1dcba. 
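The update_available_resource periodic task seen above audits the hypervisor view (free_ram/free_disk/free_vcpus), takes the compute_resources lock, and only pushes inventory to Placement when it has actually changed, which is why the log keeps printing "Inventory has not changed for provider ...". A small sketch of that change check; the inventory dict layout is copied from the log, and push_to_placement is a hypothetical callable, not the real scheduler report client:

    def sync_inventory(reported, current, push_to_placement):
        # reported: what Placement already has; current: what this audit computed.
        # Both look like {'VCPU': {'total': 48, 'allocation_ratio': 4.0, ...}, ...}.
        if reported == current:
            # "Inventory has not changed for provider ..." - nothing to do.
            return False
        push_to_placement(current)   # e.g. PUT the provider's inventories
        return True
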
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1881.894593] env[62619]: DEBUG nova.network.neutron [req-6e6b52fc-d223-4fff-9274-7f4a41d688bd req-08ab8f97-dce9-4900-acdf-883e551560fc service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Updating instance_info_cache with network_info: [{"id": "0de99671-66a1-4b86-9417-2955fdf1dcba", "address": "fa:16:3e:fc:2a:8e", "network": {"id": "6a1847de-b585-445d-8064-dc33dc365719", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1852054191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4010737526cd4a3aa36f15a187051010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0de99671-66", "ovs_interfaceid": "0de99671-66a1-4b86-9417-2955fdf1dcba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1881.948184] env[62619]: DEBUG nova.compute.utils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1881.949870] env[62619]: DEBUG nova.compute.manager [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1881.950112] env[62619]: DEBUG nova.network.neutron [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1881.992300] env[62619]: DEBUG nova.policy [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd106b23f779045f788b2536afd8c623d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2377a52a195d4f0b9181207ab5741734', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1882.337018] env[62619]: DEBUG nova.network.neutron [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Successfully created port: ac4a39b7-453e-42d2-93cf-fa1d7e2640ee {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1882.360294] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': task-1778519, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059562} completed successfully. 
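The "Policy check for network:attach_external_network failed" entry above is Nova's policy layer rejecting a member-scoped token. A minimal oslo.policy sketch of the same kind of check, assuming an illustrative 'role:admin' rule string (not Nova's real default) and the credential roles shown in the log:

    # Sketch only: reproduce a role-based policy denial with oslo.policy.
    from oslo_config import cfg
    from oslo_policy import policy

    conf = cfg.ConfigOpts()
    conf([])  # parse an empty command line so options are readable

    enforcer = policy.Enforcer(conf)
    enforcer.register_defaults([
        # Assumed check string for illustration; Nova defines its own defaults.
        policy.RuleDefault('network:attach_external_network', 'role:admin'),
    ])

    # Credentials roughly as logged: a project member without the admin role.
    creds = {'roles': ['member', 'reader'],
             'project_id': '2377a52a195d4f0b9181207ab5741734'}

    # Returns False: 'role:admin' is not satisfied, matching the failed check above.
    print(enforcer.enforce('network:attach_external_network', {}, creds))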
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.360545] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1882.361356] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1453785d-6b54-400b-a309-4d3d7841666f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.384061] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] b56800e8-1eab-4589-8d09-961f73973981/b56800e8-1eab-4589-8d09-961f73973981.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1882.384061] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44c15134-19e1-43c6-a05f-85eeaf796d83 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.397656] env[62619]: DEBUG oslo_concurrency.lockutils [req-6e6b52fc-d223-4fff-9274-7f4a41d688bd req-08ab8f97-dce9-4900-acdf-883e551560fc service nova] Releasing lock "refresh_cache-cec0ea75-042d-4ee5-91d5-cad86456ab97" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.403194] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Waiting for the task: (returnval){ [ 1882.403194] env[62619]: value = "task-1778520" [ 1882.403194] env[62619]: _type = "Task" [ 1882.403194] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.410898] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': task-1778520, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.453377] env[62619]: DEBUG nova.compute.manager [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Start building block device mappings for instance. 
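The CopyVirtualDisk_Task / ExtendVirtualDisk_Task / ReconfigVM_Task entries follow oslo.vmware's submit-then-poll pattern: invoke the vSphere method, get a task reference back, and block in wait_for_task() while the "progress is N%" lines are emitted. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession named `session` and placeholder `vmdk_path`/`dc_ref` values; this is not Nova's vm_util code:

    # Submit a vSphere task and block until oslo.vmware reports it complete.
    def extend_root_disk(session, vmdk_path, dc_ref, size_kb=1048576):
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
            name=vmdk_path, datacenter=dc_ref,
            newCapacityKb=size_kb, eagerZero=False)
        # wait_for_task() polls the task (the "progress is N%" entries) and
        # raises on failure; on success it returns the task result.
        return session.wait_for_task(task)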
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1882.527357] env[62619]: DEBUG nova.compute.manager [req-8b9935a3-2ad0-4c31-b66e-8740fcaeaf27 req-46fd2017-b411-4007-9144-1219d29fc55a service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Received event network-vif-plugged-e84ee31b-42f6-44bd-be17-381a0796e37b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1882.527638] env[62619]: DEBUG oslo_concurrency.lockutils [req-8b9935a3-2ad0-4c31-b66e-8740fcaeaf27 req-46fd2017-b411-4007-9144-1219d29fc55a service nova] Acquiring lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.527790] env[62619]: DEBUG oslo_concurrency.lockutils [req-8b9935a3-2ad0-4c31-b66e-8740fcaeaf27 req-46fd2017-b411-4007-9144-1219d29fc55a service nova] Lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.527955] env[62619]: DEBUG oslo_concurrency.lockutils [req-8b9935a3-2ad0-4c31-b66e-8740fcaeaf27 req-46fd2017-b411-4007-9144-1219d29fc55a service nova] Lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.528141] env[62619]: DEBUG nova.compute.manager [req-8b9935a3-2ad0-4c31-b66e-8740fcaeaf27 req-46fd2017-b411-4007-9144-1219d29fc55a service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] No waiting events found dispatching network-vif-plugged-e84ee31b-42f6-44bd-be17-381a0796e37b {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1882.529018] env[62619]: WARNING nova.compute.manager [req-8b9935a3-2ad0-4c31-b66e-8740fcaeaf27 req-46fd2017-b411-4007-9144-1219d29fc55a service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Received unexpected event network-vif-plugged-e84ee31b-42f6-44bd-be17-381a0796e37b for instance with vm_state active and task_state None. [ 1882.543095] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Applying migration context for instance 21d9fc7a-228e-4b33-8534-55285d4e6e96 as it has an incoming, in-progress migration 10114fb1-8a4f-4bb5-b620-f74b7b500b7d. 
Migration status is migrating {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1882.544682] env[62619]: INFO nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating resource usage from migration 10114fb1-8a4f-4bb5-b620-f74b7b500b7d [ 1882.568272] env[62619]: DEBUG nova.network.neutron [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Successfully updated port: e84ee31b-42f6-44bd-be17-381a0796e37b {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1882.575086] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1882.575239] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 8745aa7f-9848-4320-94b5-08b7e3bccf80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1882.575359] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e302e431-1f95-4ab5-bfca-59450fd887f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1882.575474] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 52b29fef-eab6-4541-a570-af9c0c021a75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1882.575583] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1882.575693] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 70265068-1185-4f23-b0b4-ed2378c17a89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1882.575800] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4c66bbdf-af6a-4705-8219-85cf19f8314e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1882.575907] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance a0953370-77f2-4e3b-a92e-cb12b3a82361 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1882.576016] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 13eeb4aa-0f20-4aed-9453-66afb0ff1152 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1882.576126] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance cec0ea75-042d-4ee5-91d5-cad86456ab97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1882.576243] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance b56800e8-1eab-4589-8d09-961f73973981 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1882.576371] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Migration 10114fb1-8a4f-4bb5-b620-f74b7b500b7d is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1882.576482] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 21d9fc7a-228e-4b33-8534-55285d4e6e96 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1882.576592] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 8ed2e4af-b484-4cd5-89c0-6ba60188127a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1882.576796] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1882.576927] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3264MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1882.758845] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2772d8f-f44e-4bfa-93d3-85d0f4149c8c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.768025] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9723aeb1-deaf-4cdf-82bd-861864fd6f7b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.797207] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42899ccf-f312-4a84-8910-e1299a823587 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.804703] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269f46a0-c64d-4966-8bc1-011fef5feebe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.818208] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1882.913593] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': task-1778520, 'name': ReconfigVM_Task} progress is 14%. 
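The "Final resource view" figures above follow directly from the placement allocations listed by _remove_deleted_instances_allocations: thirteen allocations carry MEMORY_MB=192 and one carries 256, which together with the 512 MB reserved in the inventory give used_ram=3264MB; twelve allocations carry DISK_GB=1 (used_disk=12GB); and all fourteen carry one VCPU (used_vcpus=14). A quick check of that arithmetic, using only the numbers as logged:

    # Reproduce the "Final resource view" totals from the allocations above.
    mem_allocs  = [192] * 13 + [256]      # thirteen 192 MB allocations, one 256 MB
    vcpu_allocs = [1] * 14                # every allocation carries one VCPU
    disk_allocs = [1] * 12                # twelve allocations carry DISK_GB=1
    reserved_mb = 512                     # MEMORY_MB 'reserved' from the inventory data

    print(sum(mem_allocs) + reserved_mb)  # 3264 -> used_ram=3264MB
    print(sum(vcpu_allocs))               # 14   -> used_vcpus=14
    print(sum(disk_allocs))               # 12   -> used_disk=12GB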
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.070768] env[62619]: DEBUG oslo_concurrency.lockutils [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1883.071062] env[62619]: DEBUG oslo_concurrency.lockutils [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1883.071320] env[62619]: DEBUG nova.network.neutron [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1883.159757] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b345c6-7fbc-425d-a336-598ee194789c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.178617] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating instance '21d9fc7a-228e-4b33-8534-55285d4e6e96' progress to 0 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1883.321412] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1883.415030] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': task-1778520, 'name': ReconfigVM_Task, 'duration_secs': 0.878963} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.415030] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Reconfigured VM instance instance-0000006f to attach disk [datastore1] b56800e8-1eab-4589-8d09-961f73973981/b56800e8-1eab-4589-8d09-961f73973981.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1883.415822] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5159a728-438c-482b-817f-59fb5ac50f48 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.422170] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Waiting for the task: (returnval){ [ 1883.422170] env[62619]: value = "task-1778521" [ 1883.422170] env[62619]: _type = "Task" [ 1883.422170] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.429809] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': task-1778521, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.463264] env[62619]: DEBUG nova.compute.manager [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1883.491715] env[62619]: DEBUG nova.virt.hardware [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1883.491971] env[62619]: DEBUG nova.virt.hardware [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1883.492145] env[62619]: DEBUG nova.virt.hardware [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1883.492334] env[62619]: DEBUG nova.virt.hardware [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1883.492480] env[62619]: DEBUG nova.virt.hardware [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1883.492627] env[62619]: DEBUG nova.virt.hardware [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1883.493055] env[62619]: DEBUG nova.virt.hardware [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1883.493055] env[62619]: DEBUG nova.virt.hardware [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1883.493150] env[62619]: DEBUG nova.virt.hardware [None 
req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1883.493310] env[62619]: DEBUG nova.virt.hardware [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1883.493928] env[62619]: DEBUG nova.virt.hardware [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1883.494369] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea810f7-f5ca-4849-8d73-aa0edad0bfa2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.502253] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a968455a-8fef-46fa-ab10-7922ff370fa4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.529583] env[62619]: DEBUG oslo_concurrency.lockutils [None req-751a8330-261a-41e6-b9aa-b5f420d71343 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "4c66bbdf-af6a-4705-8219-85cf19f8314e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.529884] env[62619]: DEBUG oslo_concurrency.lockutils [None req-751a8330-261a-41e6-b9aa-b5f420d71343 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.530084] env[62619]: DEBUG nova.compute.manager [None req-751a8330-261a-41e6-b9aa-b5f420d71343 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1883.531086] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f1d20a-83de-4a9b-a81f-29cb3d3a0fe6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.537380] env[62619]: DEBUG nova.compute.manager [None req-751a8330-261a-41e6-b9aa-b5f420d71343 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1883.538012] env[62619]: DEBUG nova.objects.instance [None 
req-751a8330-261a-41e6-b9aa-b5f420d71343 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lazy-loading 'flavor' on Instance uuid 4c66bbdf-af6a-4705-8219-85cf19f8314e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1883.608424] env[62619]: WARNING nova.network.neutron [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] ed952a81-cb24-4b52-a137-9ceeefb896cf already exists in list: networks containing: ['ed952a81-cb24-4b52-a137-9ceeefb896cf']. ignoring it [ 1883.608726] env[62619]: WARNING nova.network.neutron [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] ed952a81-cb24-4b52-a137-9ceeefb896cf already exists in list: networks containing: ['ed952a81-cb24-4b52-a137-9ceeefb896cf']. ignoring it [ 1883.684605] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1883.687273] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-005676fd-904d-4a1c-a315-405042070cce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.694177] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1883.694177] env[62619]: value = "task-1778522" [ 1883.694177] env[62619]: _type = "Task" [ 1883.694177] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.702358] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778522, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.826399] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1883.826620] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.298s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.931364] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': task-1778521, 'name': Rename_Task, 'duration_secs': 0.139378} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.931742] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1883.931877] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-54271b67-9a49-4256-bd52-9f2e8cee11d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.938010] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Waiting for the task: (returnval){ [ 1883.938010] env[62619]: value = "task-1778523" [ 1883.938010] env[62619]: _type = "Task" [ 1883.938010] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.950515] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': task-1778523, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.965146] env[62619]: DEBUG nova.compute.manager [req-cce97deb-2327-4c37-97bd-757d36e40d43 req-ee233e85-463f-4187-8191-b4a389f0a797 service nova] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Received event network-vif-plugged-ac4a39b7-453e-42d2-93cf-fa1d7e2640ee {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1883.965458] env[62619]: DEBUG oslo_concurrency.lockutils [req-cce97deb-2327-4c37-97bd-757d36e40d43 req-ee233e85-463f-4187-8191-b4a389f0a797 service nova] Acquiring lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.965755] env[62619]: DEBUG oslo_concurrency.lockutils [req-cce97deb-2327-4c37-97bd-757d36e40d43 req-ee233e85-463f-4187-8191-b4a389f0a797 service nova] Lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.966037] env[62619]: DEBUG oslo_concurrency.lockutils [req-cce97deb-2327-4c37-97bd-757d36e40d43 req-ee233e85-463f-4187-8191-b4a389f0a797 service nova] Lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.966037] env[62619]: DEBUG nova.compute.manager [req-cce97deb-2327-4c37-97bd-757d36e40d43 req-ee233e85-463f-4187-8191-b4a389f0a797 service nova] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] No waiting events found dispatching network-vif-plugged-ac4a39b7-453e-42d2-93cf-fa1d7e2640ee {{(pid=62619) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1883.966217] env[62619]: WARNING nova.compute.manager [req-cce97deb-2327-4c37-97bd-757d36e40d43 req-ee233e85-463f-4187-8191-b4a389f0a797 service nova] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Received unexpected event network-vif-plugged-ac4a39b7-453e-42d2-93cf-fa1d7e2640ee for instance with vm_state building and task_state spawning. [ 1884.016215] env[62619]: DEBUG nova.network.neutron [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Successfully updated port: ac4a39b7-453e-42d2-93cf-fa1d7e2640ee {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1884.019831] env[62619]: DEBUG nova.network.neutron [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Updating instance_info_cache with network_info: [{"id": "0364c1be-595c-4984-9173-39fd5163c9ad", "address": "fa:16:3e:d1:3c:33", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0364c1be-59", "ovs_interfaceid": "0364c1be-595c-4984-9173-39fd5163c9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e4b11f7c-b59b-4267-a9da-8fbd14f25154", "address": "fa:16:3e:57:cb:7b", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4b11f7c-b5", "ovs_interfaceid": "e4b11f7c-b59b-4267-a9da-8fbd14f25154", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": 
"e84ee31b-42f6-44bd-be17-381a0796e37b", "address": "fa:16:3e:e8:f1:56", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84ee31b-42", "ovs_interfaceid": "e84ee31b-42f6-44bd-be17-381a0796e37b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1884.204299] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778522, 'name': PowerOffVM_Task, 'duration_secs': 0.200416} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.204595] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1884.204809] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating instance '21d9fc7a-228e-4b33-8534-55285d4e6e96' progress to 17 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1884.448065] env[62619]: DEBUG oslo_vmware.api [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': task-1778523, 'name': PowerOnVM_Task, 'duration_secs': 0.445582} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.448329] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1884.448568] env[62619]: INFO nova.compute.manager [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Took 8.94 seconds to spawn the instance on the hypervisor. [ 1884.448738] env[62619]: DEBUG nova.compute.manager [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1884.449494] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8939acb5-eca3-464b-b187-d5a88aa7962d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.522147] env[62619]: DEBUG oslo_concurrency.lockutils [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1884.522848] env[62619]: DEBUG oslo_concurrency.lockutils [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1884.523010] env[62619]: DEBUG oslo_concurrency.lockutils [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.523479] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1884.523637] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.523945] env[62619]: DEBUG nova.network.neutron [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] 
Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1884.526187] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfefbb1-ea66-4c67-8013-a02e605b9b5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.544342] env[62619]: DEBUG nova.virt.hardware [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1884.544580] env[62619]: DEBUG nova.virt.hardware [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1884.544736] env[62619]: DEBUG nova.virt.hardware [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1884.544917] env[62619]: DEBUG nova.virt.hardware [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1884.545078] env[62619]: DEBUG nova.virt.hardware [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1884.545226] env[62619]: DEBUG nova.virt.hardware [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1884.545424] env[62619]: DEBUG nova.virt.hardware [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1884.545581] env[62619]: DEBUG nova.virt.hardware [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1884.545744] env[62619]: DEBUG nova.virt.hardware [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1884.545905] env[62619]: DEBUG nova.virt.hardware [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1884.546080] env[62619]: DEBUG nova.virt.hardware [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1884.552775] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Reconfiguring VM to attach interface {{(pid=62619) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1884.554933] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-751a8330-261a-41e6-b9aa-b5f420d71343 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1884.555166] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-600be635-cb6d-4a0b-b358-87f14748a7ad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.567680] env[62619]: DEBUG nova.compute.manager [req-f04b7456-cd74-4ae0-83f6-5158dfa5b24e req-4ea9fad8-e5bf-4a37-b3c7-3b1d02596e19 service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Received event network-changed-e84ee31b-42f6-44bd-be17-381a0796e37b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1884.567865] env[62619]: DEBUG nova.compute.manager [req-f04b7456-cd74-4ae0-83f6-5158dfa5b24e req-4ea9fad8-e5bf-4a37-b3c7-3b1d02596e19 service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Refreshing instance network info cache due to event network-changed-e84ee31b-42f6-44bd-be17-381a0796e37b. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1884.568079] env[62619]: DEBUG oslo_concurrency.lockutils [req-f04b7456-cd74-4ae0-83f6-5158dfa5b24e req-4ea9fad8-e5bf-4a37-b3c7-3b1d02596e19 service nova] Acquiring lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1884.568225] env[62619]: DEBUG oslo_concurrency.lockutils [req-f04b7456-cd74-4ae0-83f6-5158dfa5b24e req-4ea9fad8-e5bf-4a37-b3c7-3b1d02596e19 service nova] Acquired lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.568402] env[62619]: DEBUG nova.network.neutron [req-f04b7456-cd74-4ae0-83f6-5158dfa5b24e req-4ea9fad8-e5bf-4a37-b3c7-3b1d02596e19 service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Refreshing network info cache for port e84ee31b-42f6-44bd-be17-381a0796e37b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1884.570565] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60a1e9b3-c1e4-40a7-822c-2d9d5c579a84 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.578096] env[62619]: DEBUG oslo_vmware.api [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1884.578096] env[62619]: value = "task-1778525" [ 1884.578096] env[62619]: _type = "Task" [ 1884.578096] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.579646] env[62619]: DEBUG oslo_vmware.api [None req-751a8330-261a-41e6-b9aa-b5f420d71343 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1884.579646] env[62619]: value = "task-1778524" [ 1884.579646] env[62619]: _type = "Task" [ 1884.579646] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.594016] env[62619]: DEBUG oslo_vmware.api [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778525, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.594292] env[62619]: DEBUG oslo_vmware.api [None req-751a8330-261a-41e6-b9aa-b5f420d71343 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778524, 'name': PowerOffVM_Task} progress is 0%. 
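The recurring "Acquiring lock ... / Lock ... acquired ... waited Ns / ... released ... held Ns" triples come from oslo.concurrency's lock helpers, which log those messages around the critical section. A minimal sketch of the two usual forms, with illustrative lock names and empty bodies; this is not Nova's own code:

    from oslo_concurrency import lockutils

    # Context-manager form: the acquire/release log lines bracket this block.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache under the lock

    # Decorator form, as with the 'compute_resources' lock in the resource tracker entries.
    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        pass  # audit and claim host resources while holding the lock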
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.711841] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1884.712199] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1884.712288] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1884.712493] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1884.712633] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1884.712769] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1884.712967] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1884.713135] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1884.713326] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Got 1 possible topologies 
{{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1884.713503] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1884.713677] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1884.719492] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d101bf9e-df02-48c3-ac8c-2586701c6f11 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.737384] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1884.737384] env[62619]: value = "task-1778526" [ 1884.737384] env[62619]: _type = "Task" [ 1884.737384] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.745659] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778526, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.826553] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1884.826780] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1884.965205] env[62619]: INFO nova.compute.manager [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Took 13.75 seconds to build instance. [ 1885.063999] env[62619]: DEBUG nova.network.neutron [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1885.093929] env[62619]: DEBUG oslo_vmware.api [None req-751a8330-261a-41e6-b9aa-b5f420d71343 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778524, 'name': PowerOffVM_Task, 'duration_secs': 0.214008} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.097412] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-751a8330-261a-41e6-b9aa-b5f420d71343 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1885.097633] env[62619]: DEBUG nova.compute.manager [None req-751a8330-261a-41e6-b9aa-b5f420d71343 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1885.100887] env[62619]: DEBUG oslo_vmware.api [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778525, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.100887] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85238b5-71d6-4b34-b884-cf4d63a865cd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.209989] env[62619]: DEBUG nova.network.neutron [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Updating instance_info_cache with network_info: [{"id": "ac4a39b7-453e-42d2-93cf-fa1d7e2640ee", "address": "fa:16:3e:d8:d1:15", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac4a39b7-45", "ovs_interfaceid": "ac4a39b7-453e-42d2-93cf-fa1d7e2640ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.247628] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778526, 'name': ReconfigVM_Task, 'duration_secs': 0.144163} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.247934] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating instance '21d9fc7a-228e-4b33-8534-55285d4e6e96' progress to 33 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1885.333725] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1885.333893] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1885.361055] env[62619]: DEBUG nova.network.neutron [req-f04b7456-cd74-4ae0-83f6-5158dfa5b24e req-4ea9fad8-e5bf-4a37-b3c7-3b1d02596e19 service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Updated VIF entry in instance network info cache for port e84ee31b-42f6-44bd-be17-381a0796e37b. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1885.361611] env[62619]: DEBUG nova.network.neutron [req-f04b7456-cd74-4ae0-83f6-5158dfa5b24e req-4ea9fad8-e5bf-4a37-b3c7-3b1d02596e19 service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Updating instance_info_cache with network_info: [{"id": "0364c1be-595c-4984-9173-39fd5163c9ad", "address": "fa:16:3e:d1:3c:33", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0364c1be-59", "ovs_interfaceid": "0364c1be-595c-4984-9173-39fd5163c9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e4b11f7c-b59b-4267-a9da-8fbd14f25154", "address": "fa:16:3e:57:cb:7b", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4b11f7c-b5", "ovs_interfaceid": "e4b11f7c-b59b-4267-a9da-8fbd14f25154", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e84ee31b-42f6-44bd-be17-381a0796e37b", "address": "fa:16:3e:e8:f1:56", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84ee31b-42", "ovs_interfaceid": "e84ee31b-42f6-44bd-be17-381a0796e37b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.467862] env[62619]: DEBUG oslo_concurrency.lockutils [None req-17c205cf-13a5-4c23-aa29-b669dd51755e tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Lock "b56800e8-1eab-4589-8d09-961f73973981" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.257s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.488051] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Acquiring lock "b56800e8-1eab-4589-8d09-961f73973981" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.488051] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Lock "b56800e8-1eab-4589-8d09-961f73973981" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.488265] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Acquiring 
lock "b56800e8-1eab-4589-8d09-961f73973981-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.488531] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Lock "b56800e8-1eab-4589-8d09-961f73973981-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.488699] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Lock "b56800e8-1eab-4589-8d09-961f73973981-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.490893] env[62619]: INFO nova.compute.manager [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Terminating instance [ 1885.591391] env[62619]: DEBUG oslo_vmware.api [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778525, 'name': ReconfigVM_Task, 'duration_secs': 0.871487} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.591868] env[62619]: DEBUG oslo_concurrency.lockutils [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.592089] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Reconfigured VM to attach interface {{(pid=62619) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1885.612646] env[62619]: DEBUG oslo_concurrency.lockutils [None req-751a8330-261a-41e6-b9aa-b5f420d71343 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.083s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.712985] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.713338] env[62619]: DEBUG nova.compute.manager [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Instance network_info: |[{"id": "ac4a39b7-453e-42d2-93cf-fa1d7e2640ee", "address": "fa:16:3e:d8:d1:15", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac4a39b7-45", "ovs_interfaceid": "ac4a39b7-453e-42d2-93cf-fa1d7e2640ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1885.713779] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:d1:15', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac4a39b7-453e-42d2-93cf-fa1d7e2640ee', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1885.721284] env[62619]: DEBUG oslo.service.loopingcall [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1885.721500] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1885.721730] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d00e85c-e290-4b3d-bdd6-8dbee123e088 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.742375] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1885.742375] env[62619]: value = "task-1778527" [ 1885.742375] env[62619]: _type = "Task" [ 1885.742375] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.750242] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778527, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.754230] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1885.754483] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1885.754724] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1885.754942] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1885.755212] env[62619]: DEBUG 
nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1885.755269] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1885.755447] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1885.755605] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1885.755771] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1885.756019] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1885.756114] env[62619]: DEBUG nova.virt.hardware [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1885.761522] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Reconfiguring VM instance instance-0000006d to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1885.762101] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f1921de-694d-42ae-b0ab-021f3d27ebf4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.781624] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1885.781624] env[62619]: value = "task-1778528" [ 1885.781624] env[62619]: _type = "Task" [ 1885.781624] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.790304] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778528, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.864573] env[62619]: DEBUG oslo_concurrency.lockutils [req-f04b7456-cd74-4ae0-83f6-5158dfa5b24e req-4ea9fad8-e5bf-4a37-b3c7-3b1d02596e19 service nova] Releasing lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.994761] env[62619]: DEBUG nova.compute.manager [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1885.995123] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1885.996354] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35cfc3f0-0d73-4da4-b46d-959048e05d29 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.001196] env[62619]: DEBUG nova.compute.manager [req-67ebfcf6-4d20-49dd-bc7a-e669e27134be req-7c12ab11-76ec-4b11-983a-8c43e176c3f6 service nova] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Received event network-changed-ac4a39b7-453e-42d2-93cf-fa1d7e2640ee {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1886.001741] env[62619]: DEBUG nova.compute.manager [req-67ebfcf6-4d20-49dd-bc7a-e669e27134be req-7c12ab11-76ec-4b11-983a-8c43e176c3f6 service nova] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Refreshing instance network info cache due to event network-changed-ac4a39b7-453e-42d2-93cf-fa1d7e2640ee. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1886.001971] env[62619]: DEBUG oslo_concurrency.lockutils [req-67ebfcf6-4d20-49dd-bc7a-e669e27134be req-7c12ab11-76ec-4b11-983a-8c43e176c3f6 service nova] Acquiring lock "refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.002133] env[62619]: DEBUG oslo_concurrency.lockutils [req-67ebfcf6-4d20-49dd-bc7a-e669e27134be req-7c12ab11-76ec-4b11-983a-8c43e176c3f6 service nova] Acquired lock "refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.002297] env[62619]: DEBUG nova.network.neutron [req-67ebfcf6-4d20-49dd-bc7a-e669e27134be req-7c12ab11-76ec-4b11-983a-8c43e176c3f6 service nova] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Refreshing network info cache for port ac4a39b7-453e-42d2-93cf-fa1d7e2640ee {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1886.008211] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1886.008484] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59214f5f-80e0-43e4-8845-68591c514ceb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.016253] env[62619]: DEBUG oslo_vmware.api [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Waiting for the task: (returnval){ [ 1886.016253] env[62619]: value = "task-1778529" [ 1886.016253] env[62619]: _type = "Task" [ 1886.016253] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.025317] env[62619]: DEBUG oslo_vmware.api [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': task-1778529, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.029372] env[62619]: DEBUG nova.objects.instance [None req-8b084f8f-23eb-4ea4-bd01-23f7ea936f06 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lazy-loading 'flavor' on Instance uuid 4c66bbdf-af6a-4705-8219-85cf19f8314e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1886.097446] env[62619]: DEBUG oslo_concurrency.lockutils [None req-781c0ad9-9c18-4d9d-943f-56d1aae5c62d tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-e84ee31b-42f6-44bd-be17-381a0796e37b" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.373s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.254542] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778527, 'name': CreateVM_Task, 'duration_secs': 0.359618} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.254794] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1886.255504] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.255689] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.256090] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1886.256631] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32e66f8c-f4ad-4d57-82fb-c729b74cd633 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.261669] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1886.261669] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520f3937-3774-6e68-69da-7cd5dd31b714" [ 1886.261669] env[62619]: _type = "Task" [ 1886.261669] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.270219] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520f3937-3774-6e68-69da-7cd5dd31b714, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.290914] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778528, 'name': ReconfigVM_Task, 'duration_secs': 0.166677} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.291207] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Reconfigured VM instance instance-0000006d to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1886.291961] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8d7aaa-1af9-4121-84bb-b79d86847204 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.313656] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] volume-d3e2944a-ae36-4f80-82db-68a3de2d143e/volume-d3e2944a-ae36-4f80-82db-68a3de2d143e.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1886.313921] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c956687-c71a-4a67-8266-77549d5e1019 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.332586] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1886.332586] env[62619]: value = "task-1778530" [ 1886.332586] env[62619]: _type = "Task" [ 1886.332586] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.341807] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778530, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.526815] env[62619]: DEBUG oslo_vmware.api [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': task-1778529, 'name': PowerOffVM_Task, 'duration_secs': 0.243849} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.527080] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1886.527329] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1886.527500] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38dc1d88-4798-4e65-9368-381984fe5f8d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.533927] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b084f8f-23eb-4ea4-bd01-23f7ea936f06 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.534125] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b084f8f-23eb-4ea4-bd01-23f7ea936f06 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.534262] env[62619]: DEBUG nova.network.neutron [None req-8b084f8f-23eb-4ea4-bd01-23f7ea936f06 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1886.534464] env[62619]: DEBUG nova.objects.instance [None req-8b084f8f-23eb-4ea4-bd01-23f7ea936f06 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lazy-loading 'info_cache' on Instance uuid 4c66bbdf-af6a-4705-8219-85cf19f8314e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1886.634592] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1886.634807] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 
tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1886.635059] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Deleting the datastore file [datastore1] b56800e8-1eab-4589-8d09-961f73973981 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1886.635244] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28a9e289-c997-438e-b73c-3755b4350ce6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.642018] env[62619]: DEBUG oslo_vmware.api [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Waiting for the task: (returnval){ [ 1886.642018] env[62619]: value = "task-1778532" [ 1886.642018] env[62619]: _type = "Task" [ 1886.642018] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.649370] env[62619]: DEBUG oslo_vmware.api [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': task-1778532, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.733987] env[62619]: DEBUG nova.network.neutron [req-67ebfcf6-4d20-49dd-bc7a-e669e27134be req-7c12ab11-76ec-4b11-983a-8c43e176c3f6 service nova] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Updated VIF entry in instance network info cache for port ac4a39b7-453e-42d2-93cf-fa1d7e2640ee. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1886.734394] env[62619]: DEBUG nova.network.neutron [req-67ebfcf6-4d20-49dd-bc7a-e669e27134be req-7c12ab11-76ec-4b11-983a-8c43e176c3f6 service nova] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Updating instance_info_cache with network_info: [{"id": "ac4a39b7-453e-42d2-93cf-fa1d7e2640ee", "address": "fa:16:3e:d8:d1:15", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac4a39b7-45", "ovs_interfaceid": "ac4a39b7-453e-42d2-93cf-fa1d7e2640ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1886.773888] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520f3937-3774-6e68-69da-7cd5dd31b714, 'name': SearchDatastore_Task, 'duration_secs': 0.014754} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.774215] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1886.774455] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1886.774920] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.774920] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.775058] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1886.775267] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40b916ec-c6aa-4883-8309-e861851fa9f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.786955] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1886.787186] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1886.787906] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ced070a3-7b21-4ef3-86ec-6fb13062d3ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.795717] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1886.795717] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52798b1e-3553-f95c-23ff-f1335765f017" [ 1886.795717] env[62619]: _type = "Task" [ 1886.795717] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.803165] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52798b1e-3553-f95c-23ff-f1335765f017, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.842325] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778530, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.892691] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "refresh_cache-8745aa7f-9848-4320-94b5-08b7e3bccf80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.892848] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquired lock "refresh_cache-8745aa7f-9848-4320-94b5-08b7e3bccf80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.892996] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Forcefully refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1887.039997] env[62619]: DEBUG nova.objects.base [None req-8b084f8f-23eb-4ea4-bd01-23f7ea936f06 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Object Instance<4c66bbdf-af6a-4705-8219-85cf19f8314e> lazy-loaded attributes: flavor,info_cache {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1887.151774] env[62619]: DEBUG oslo_vmware.api [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Task: {'id': task-1778532, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147316} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.152011] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1887.152202] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1887.152378] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1887.152545] env[62619]: INFO nova.compute.manager [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] [instance: b56800e8-1eab-4589-8d09-961f73973981] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1887.152778] env[62619]: DEBUG oslo.service.loopingcall [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1887.152964] env[62619]: DEBUG nova.compute.manager [-] [instance: b56800e8-1eab-4589-8d09-961f73973981] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1887.153071] env[62619]: DEBUG nova.network.neutron [-] [instance: b56800e8-1eab-4589-8d09-961f73973981] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1887.237930] env[62619]: DEBUG oslo_concurrency.lockutils [req-67ebfcf6-4d20-49dd-bc7a-e669e27134be req-7c12ab11-76ec-4b11-983a-8c43e176c3f6 service nova] Releasing lock "refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1887.306764] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52798b1e-3553-f95c-23ff-f1335765f017, 'name': SearchDatastore_Task, 'duration_secs': 0.008681} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.307453] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84c25e53-2e43-4c38-8354-2cf49d2528df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.313295] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1887.313295] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526df41d-733f-7200-a06f-f477996e95e4" [ 1887.313295] env[62619]: _type = "Task" [ 1887.313295] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.320907] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526df41d-733f-7200-a06f-f477996e95e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.341237] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778530, 'name': ReconfigVM_Task, 'duration_secs': 0.805618} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.341511] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Reconfigured VM instance instance-0000006d to attach disk [datastore1] volume-d3e2944a-ae36-4f80-82db-68a3de2d143e/volume-d3e2944a-ae36-4f80-82db-68a3de2d143e.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1887.341787] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating instance '21d9fc7a-228e-4b33-8534-55285d4e6e96' progress to 50 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1887.550467] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "interface-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-e4b11f7c-b59b-4267-a9da-8fbd14f25154" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.551773] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-e4b11f7c-b59b-4267-a9da-8fbd14f25154" acquired by 
"nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1887.769831] env[62619]: DEBUG nova.network.neutron [None req-8b084f8f-23eb-4ea4-bd01-23f7ea936f06 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance_info_cache with network_info: [{"id": "b1a6212d-63f4-4343-9100-d88707a89c10", "address": "fa:16:3e:48:b2:0f", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a6212d-63", "ovs_interfaceid": "b1a6212d-63f4-4343-9100-d88707a89c10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1887.823440] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526df41d-733f-7200-a06f-f477996e95e4, 'name': SearchDatastore_Task, 'duration_secs': 0.023898} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.824186] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1887.824186] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8ed2e4af-b484-4cd5-89c0-6ba60188127a/8ed2e4af-b484-4cd5-89c0-6ba60188127a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1887.824367] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8825005b-527a-4f22-b3b2-3b383ffac0ce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.832246] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1887.832246] env[62619]: value = "task-1778533" [ 1887.832246] env[62619]: _type = "Task" [ 1887.832246] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.840509] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778533, 'name': CopyVirtualDisk_Task} progress is 0%. 
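The recurring "Waiting for the task: (returnval){ value = task-... }" and "progress is N%" entries are oslo.vmware's task-polling loop. The sketch below shows how a vSphere task is issued and waited on through oslo.vmware, with a placeholder vCenter host, placeholder credentials, and an already-resolved vm_ref; the real driver routes this through nova.virt.vmwareapi.vm_util rather than calling the session directly.

# Minimal sketch (hypothetical host/credentials): start a vSphere task via
# oslo.vmware and block until it completes, which is what produces the
# "Waiting for the task ..." and "progress is N%" entries in this log.
from oslo_vmware import api as vmware_api


def power_on_vm(vm_ref):
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',            # assumption: placeholder vCenter host
        'administrator@vsphere.local',    # assumption: placeholder credentials
        'secret',
        api_retry_count=10,
        task_poll_interval=0.5)           # controls how often progress is polled
    try:
        # invoke_api() issues the SOAP call; *_Task methods return a task
        # managed object reference instead of an immediate result.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task info until success or error and
        # raises on failure; this is the _poll_task loop seen above.
        session.wait_for_task(task_ref)
    finally:
        session.logout()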
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.848598] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80434339-8369-4fd2-9209-c936965fba7f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.868488] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9cc835-6fab-435d-b7ad-fc4cc84e1ff4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.885951] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating instance '21d9fc7a-228e-4b33-8534-55285d4e6e96' progress to 67 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1887.894299] env[62619]: DEBUG nova.network.neutron [-] [instance: b56800e8-1eab-4589-8d09-961f73973981] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1888.029695] env[62619]: DEBUG nova.compute.manager [req-5fa3bbe1-c00f-4d46-895b-73af01cc0f6f req-54cf389d-7ca8-4c99-8a04-c498de79c7ca service nova] [instance: b56800e8-1eab-4589-8d09-961f73973981] Received event network-vif-deleted-55b17715-2239-4550-9b2c-78d7f930a2fd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1888.053185] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1888.053546] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1888.054343] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6ce9b6-8eb5-40ee-b073-970d7cc2549f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.078255] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb0162c4-2031-4fa8-943f-779e93e4b48c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.108441] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Reconfiguring VM to detach interface {{(pid=62619) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1888.109193] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a206cdce-3213-49c1-a81d-2e3b8e7c4e19 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.130749] env[62619]: DEBUG oslo_vmware.api [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1888.130749] env[62619]: value = "task-1778534" [ 1888.130749] env[62619]: _type = "Task" [ 1888.130749] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.139866] env[62619]: DEBUG oslo_vmware.api [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778534, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.181773] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Updating instance_info_cache with network_info: [{"id": "9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01", "address": "fa:16:3e:da:01:a7", "network": {"id": "15b6b4a4-068e-4a3e-ad41-5bfb35e62ff9", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1165217213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb5d393c514d41f78fd4ea45d2f888a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9abe90c7-65", "ovs_interfaceid": "9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1888.273712] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b084f8f-23eb-4ea4-bd01-23f7ea936f06 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1888.342388] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778533, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.420796} completed successfully. 
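The long "Updating instance_info_cache with network_info: [...]" entries are Nova's serialized network model: a list of VIFs, each wrapping a network with subnets, fixed IPs and floating IPs. Below is a small helper written against the structure shown in the log (not Nova's own model classes) that pulls the addresses out of such a blob; the sample data is trimmed from the entry for instance 4c66bbdf-af6a-4705-8219-85cf19f8314e above.

# Sketch of walking the network_info structure logged above. The dict layout
# mirrors the cached blob (vif -> network -> subnets -> ips -> floating_ips).
def extract_addresses(network_info):
    """Return [(device, fixed_ip, [floating_ips])] for every VIF."""
    result = []
    for vif in network_info:
        for subnet in vif.get('network', {}).get('subnets', []):
            for ip in subnet.get('ips', []):
                floats = [f['address'] for f in ip.get('floating_ips', [])]
                result.append((vif.get('devname'), ip['address'], floats))
    return result


sample = [{
    'id': 'b1a6212d-63f4-4343-9100-d88707a89c10',
    'devname': 'tapb1a6212d-63',
    'network': {'subnets': [{'cidr': '192.168.128.0/28',
                             'ips': [{'address': '192.168.128.10',
                                      'floating_ips': [{'address': '10.180.180.228'}]}]}]},
}]
print(extract_addresses(sample))
# [('tapb1a6212d-63', '192.168.128.10', ['10.180.180.228'])]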
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.342637] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8ed2e4af-b484-4cd5-89c0-6ba60188127a/8ed2e4af-b484-4cd5-89c0-6ba60188127a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1888.342845] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1888.343101] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-010db4e4-9888-4741-b86e-4962f10ae38e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.349036] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1888.349036] env[62619]: value = "task-1778535" [ 1888.349036] env[62619]: _type = "Task" [ 1888.349036] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.356171] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778535, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.396880] env[62619]: INFO nova.compute.manager [-] [instance: b56800e8-1eab-4589-8d09-961f73973981] Took 1.24 seconds to deallocate network for instance. [ 1888.640740] env[62619]: DEBUG oslo_vmware.api [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778534, 'name': ReconfigVM_Task} progress is 14%. 
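The "Extending root virtual disk to 1048576" step above goes through vCenter's VirtualDiskManager. The following is a hedged sketch of that call over an existing oslo.vmware session; dc_ref and the datastore path are placeholders, and Nova's own implementation lives in nova.virt.vmwareapi.vm_util.

# Sketch (placeholder refs/paths): grow a VMDK via VirtualDiskManager, the
# call behind the ExtendVirtualDisk_Task entries above. `session` is an
# oslo.vmware VMwareAPISession, `dc_ref` a datacenter managed object ref.
def extend_virtual_disk(session, dc_ref, vmdk_path, new_size_kb):
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=vmdk_path,              # e.g. '[datastore1] <uuid>/<uuid>.vmdk'
        datacenter=dc_ref,
        newCapacityKb=new_size_kb,   # e.g. the 1048576 requested above
        eagerZero=False)
    session.wait_for_task(task)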
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.684483] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Releasing lock "refresh_cache-8745aa7f-9848-4320-94b5-08b7e3bccf80" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1888.684747] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 1888.684977] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.685153] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.685299] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.685461] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.685669] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.858662] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778535, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.056244} completed successfully. 
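The burst of "Running periodic task ComputeManager._poll_*" entries above comes from oslo.service's periodic-task machinery. Here is a minimal, self-contained sketch of how such tasks are declared and driven; ExampleManager and the 10-second spacing are illustrative, not Nova's actual configuration.

# Sketch of the periodic-task pattern behind the "Running periodic task ..."
# entries above. `ExampleManager` is illustrative; Nova's ComputeManager
# declares its _poll_* methods the same way.
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF


class ExampleManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=10)
    def _poll_unconfirmed_resizes(self, context):
        # real code would look up resizes pending confirmation here
        pass

    def run_once(self, context):
        # normally a looping call invokes this on an interval, producing
        # one "Running periodic task ..." line per due task
        self.run_periodic_tasks(context)


ExampleManager().run_once(context=None)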
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.858880] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1888.859661] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91351ddb-7f7a-4518-87ea-8239b8f2e069 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.882700] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 8ed2e4af-b484-4cd5-89c0-6ba60188127a/8ed2e4af-b484-4cd5-89c0-6ba60188127a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1888.882960] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-031da36a-fb88-4636-87cf-4971f825ff55 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.903618] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1888.903618] env[62619]: value = "task-1778536" [ 1888.903618] env[62619]: _type = "Task" [ 1888.903618] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.907167] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.907418] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.907636] env[62619]: DEBUG nova.objects.instance [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Lazy-loading 'resources' on Instance uuid b56800e8-1eab-4589-8d09-961f73973981 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1888.913525] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778536, 'name': ReconfigVM_Task} progress is 6%. 
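The "Acquiring lock ... / acquired ... waited / released ... held" triplets throughout this log (the "compute_resources" lock just above, for example) are emitted by oslo.concurrency's lock helpers. Below is a minimal sketch of the two usual forms with an illustrative lock name; the waited/held timings in the log come from this same wrapper.

# Sketch of the oslo.concurrency locking that produces the
# "Acquiring lock ... / acquired ... / released ..." lines above.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage():
    # critical section: only one thread updates resource usage at a time
    pass


def update_usage_ctx():
    # equivalent context-manager form
    with lockutils.lock('compute_resources'):
        pass


update_usage()
update_usage_ctx()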
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.141642] env[62619]: DEBUG oslo_vmware.api [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778534, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.280123] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b084f8f-23eb-4ea4-bd01-23f7ea936f06 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1889.280443] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c8a2902-4dd2-48db-9df8-a189dc7460a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.287759] env[62619]: DEBUG oslo_vmware.api [None req-8b084f8f-23eb-4ea4-bd01-23f7ea936f06 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1889.287759] env[62619]: value = "task-1778537" [ 1889.287759] env[62619]: _type = "Task" [ 1889.287759] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.295870] env[62619]: DEBUG oslo_vmware.api [None req-8b084f8f-23eb-4ea4-bd01-23f7ea936f06 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778537, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.415647] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778536, 'name': ReconfigVM_Task, 'duration_secs': 0.306727} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.415980] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 8ed2e4af-b484-4cd5-89c0-6ba60188127a/8ed2e4af-b484-4cd5-89c0-6ba60188127a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1889.416668] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75f7b255-a4f7-4cd4-889d-92314568338c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.423432] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1889.423432] env[62619]: value = "task-1778538" [ 1889.423432] env[62619]: _type = "Task" [ 1889.423432] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.431257] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778538, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.535701] env[62619]: DEBUG nova.network.neutron [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Port ce3520ea-f75e-4d6a-a27f-de90d6383823 binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1889.597545] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbe07f1-aa2c-4841-ab3d-10dbf84f20fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.605914] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae564cf-a4a7-4e8e-aa4d-d3ca463485fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.641029] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd550be-1d9a-4c9a-b91a-a9b63fa0c9b0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.651595] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db34065-248e-4513-b8a0-549716337f1b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.655375] env[62619]: DEBUG oslo_vmware.api [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778534, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.674771] env[62619]: DEBUG nova.compute.provider_tree [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1889.798253] env[62619]: DEBUG oslo_vmware.api [None req-8b084f8f-23eb-4ea4-bd01-23f7ea936f06 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778537, 'name': PowerOnVM_Task, 'duration_secs': 0.402438} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.798645] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b084f8f-23eb-4ea4-bd01-23f7ea936f06 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1889.798952] env[62619]: DEBUG nova.compute.manager [None req-8b084f8f-23eb-4ea4-bd01-23f7ea936f06 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1889.800126] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b44e55f-bf90-4401-911f-75e313c3739e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.934887] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778538, 'name': Rename_Task, 'duration_secs': 0.141586} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.935182] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1889.935420] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca6f517a-3924-41e8-85b3-62fe5eb52094 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.941473] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1889.941473] env[62619]: value = "task-1778539" [ 1889.941473] env[62619]: _type = "Task" [ 1889.941473] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.949563] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778539, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.148856] env[62619]: DEBUG oslo_vmware.api [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778534, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.178047] env[62619]: DEBUG nova.scheduler.client.report [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1890.452070] env[62619]: DEBUG oslo_vmware.api [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778539, 'name': PowerOnVM_Task, 'duration_secs': 0.445017} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.452434] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1890.452639] env[62619]: INFO nova.compute.manager [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Took 6.99 seconds to spawn the instance on the hypervisor. 
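After each PowerOnVM_Task above, the driver "checks state" by reading the VM's runtime power state back from vCenter, which is what the surrounding PropertyCollector.RetrievePropertiesEx invocations are doing. A hedged sketch of that read via oslo.vmware's vim_util helper follows; session and vm_ref are assumed to already exist.

# Sketch (assumes an existing oslo.vmware session and vm_ref): read the
# power state back after PowerOnVM_Task, the "Checking state" step above.
from oslo_vmware import vim_util


def get_power_state(session, vm_ref):
    # Uses RetrievePropertiesEx under the hood, matching the
    # PropertyCollector calls in the log.
    state = session.invoke_api(vim_util, 'get_object_property',
                               session.vim, vm_ref, 'runtime.powerState')
    # vCenter reports 'poweredOn', 'poweredOff' or 'suspended'
    return str(state)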
[ 1890.452813] env[62619]: DEBUG nova.compute.manager [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1890.453699] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b0276a-b4e9-4d41-a505-95e77856a5f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.559260] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "21d9fc7a-228e-4b33-8534-55285d4e6e96-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.559492] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "21d9fc7a-228e-4b33-8534-55285d4e6e96-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.559662] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "21d9fc7a-228e-4b33-8534-55285d4e6e96-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.649014] env[62619]: DEBUG oslo_vmware.api [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778534, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.683428] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.776s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.702878] env[62619]: INFO nova.scheduler.client.report [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Deleted allocations for instance b56800e8-1eab-4589-8d09-961f73973981 [ 1890.973738] env[62619]: INFO nova.compute.manager [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Took 11.82 seconds to build instance. 
[ 1891.149553] env[62619]: DEBUG oslo_vmware.api [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778534, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.211501] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6b090e7a-ccd4-4be0-9ba6-d63096fac2ee tempest-ServerAddressesNegativeTestJSON-1130335529 tempest-ServerAddressesNegativeTestJSON-1130335529-project-member] Lock "b56800e8-1eab-4589-8d09-961f73973981" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.723s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.340139] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "52b29fef-eab6-4541-a570-af9c0c021a75" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.340418] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "52b29fef-eab6-4541-a570-af9c0c021a75" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.477241] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f855a00a-5c7d-43ab-a26b-26d03259737f tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.335s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.595999] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.596235] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.596414] env[62619]: DEBUG nova.network.neutron [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1891.648644] env[62619]: DEBUG oslo_vmware.api [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 
tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778534, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.844214] env[62619]: INFO nova.compute.manager [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Detaching volume 292d6452-98d6-460a-b9d5-6f63c5392ab1 [ 1891.885090] env[62619]: INFO nova.virt.block_device [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Attempting to driver detach volume 292d6452-98d6-460a-b9d5-6f63c5392ab1 from mountpoint /dev/sdb [ 1891.885381] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Volume detach. Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1891.885574] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369157', 'volume_id': '292d6452-98d6-460a-b9d5-6f63c5392ab1', 'name': 'volume-292d6452-98d6-460a-b9d5-6f63c5392ab1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '52b29fef-eab6-4541-a570-af9c0c021a75', 'attached_at': '', 'detached_at': '', 'volume_id': '292d6452-98d6-460a-b9d5-6f63c5392ab1', 'serial': '292d6452-98d6-460a-b9d5-6f63c5392ab1'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1891.886536] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d52073e-47f2-4244-b20c-d2986b98172a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.908485] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-489af1f8-a27a-4540-a4b8-3df588e6e580 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.915496] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b490ab5-3075-44e5-83aa-3f8129880292 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.939317] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb412a2-35da-4ade-bf68-93b1bec8f6d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.958197] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] The volume has not been displaced from its original location: [datastore1] 
volume-292d6452-98d6-460a-b9d5-6f63c5392ab1/volume-292d6452-98d6-460a-b9d5-6f63c5392ab1.vmdk. No consolidation needed. {{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1891.964522] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Reconfiguring VM instance instance-0000005f to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1891.964928] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fed9093-93c0-47f3-ad7d-09418fec93e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.986454] env[62619]: DEBUG oslo_vmware.api [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1891.986454] env[62619]: value = "task-1778540" [ 1891.986454] env[62619]: _type = "Task" [ 1891.986454] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.994340] env[62619]: DEBUG oslo_vmware.api [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778540, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.148922] env[62619]: DEBUG oslo_vmware.api [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778534, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.248695] env[62619]: DEBUG nova.compute.manager [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Stashing vm_state: active {{(pid=62619) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1892.495419] env[62619]: DEBUG oslo_vmware.api [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778540, 'name': ReconfigVM_Task, 'duration_secs': 0.446548} completed successfully. 
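The "Reconfiguring VM instance instance-0000005f to detach disk 2001" step above is a ReconfigVM_Task whose spec removes a single virtual device. The sketch below builds and submits that spec with an oslo.vmware session; device stands for the VirtualDisk already located in the VM's hardware list, and Nova's real code is in nova.virt.vmwareapi.volumeops and vm_util.

# Sketch (session/vm_ref/device assumed to exist): detach a disk by sending
# a ReconfigVM_Task with a single 'remove' device change, as in the
# "Reconfiguring VM instance ... to detach disk" entries above.
def detach_disk(session, vm_ref, device):
    factory = session.vim.client.factory
    spec = factory.create('ns0:VirtualMachineConfigSpec')
    dev_change = factory.create('ns0:VirtualDeviceConfigSpec')
    dev_change.operation = 'remove'
    dev_change.device = device          # the VirtualDisk to drop (e.g. key 2001)
    spec.deviceChange = [dev_change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
    session.wait_for_task(task)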
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.495686] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Reconfigured VM instance instance-0000005f to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1892.500944] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e780d21f-f0f2-41bd-b526-b64aeec50ec0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.516823] env[62619]: DEBUG oslo_vmware.api [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1892.516823] env[62619]: value = "task-1778541" [ 1892.516823] env[62619]: _type = "Task" [ 1892.516823] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.525346] env[62619]: DEBUG oslo_vmware.api [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778541, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.526307] env[62619]: DEBUG nova.network.neutron [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating instance_info_cache with network_info: [{"id": "ce3520ea-f75e-4d6a-a27f-de90d6383823", "address": "fa:16:3e:90:45:ed", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce3520ea-f7", "ovs_interfaceid": "ce3520ea-f75e-4d6a-a27f-de90d6383823", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.652857] env[62619]: DEBUG oslo_vmware.api [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778534, 'name': ReconfigVM_Task} 
progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.771067] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.771491] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.027627] env[62619]: DEBUG oslo_vmware.api [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778541, 'name': ReconfigVM_Task, 'duration_secs': 0.271953} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.028021] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369157', 'volume_id': '292d6452-98d6-460a-b9d5-6f63c5392ab1', 'name': 'volume-292d6452-98d6-460a-b9d5-6f63c5392ab1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '52b29fef-eab6-4541-a570-af9c0c021a75', 'attached_at': '', 'detached_at': '', 'volume_id': '292d6452-98d6-460a-b9d5-6f63c5392ab1', 'serial': '292d6452-98d6-460a-b9d5-6f63c5392ab1'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1893.030660] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.153191] env[62619]: DEBUG oslo_vmware.api [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778534, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.277139] env[62619]: INFO nova.compute.claims [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1893.545060] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7f75ee-e63d-44c1-9498-62ecb93867b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.552287] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c2459a-5ddd-4c93-9378-14d031ef5979 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.580591] env[62619]: DEBUG nova.objects.instance [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lazy-loading 'flavor' on Instance uuid 52b29fef-eab6-4541-a570-af9c0c021a75 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1893.651985] env[62619]: DEBUG oslo_vmware.api [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778534, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.784553] env[62619]: INFO nova.compute.resource_tracker [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Updating resource usage from migration 268e53a3-9ffc-4793-999c-34fa996ac0a8 [ 1893.973697] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-909e1ff9-79ce-4981-bdd6-a6165dffb176 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.981537] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738e833f-a31d-4128-b570-0a5a21fe1f95 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.020829] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b4e360-62dd-47d1-999b-026f12e12c90 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.028997] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de406451-b230-441e-8347-9f092e992f87 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.042684] env[62619]: DEBUG nova.compute.provider_tree [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1894.152631] env[62619]: DEBUG oslo_vmware.api [None 
req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778534, 'name': ReconfigVM_Task, 'duration_secs': 5.926395} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.152881] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.153103] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Reconfigured VM to detach interface {{(pid=62619) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1894.395853] env[62619]: DEBUG nova.compute.manager [req-b3649920-f4a7-4d55-9bee-f2c2b3f0c77b req-6340c35e-173d-4855-a157-389b1fd632df service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Received event network-vif-deleted-e4b11f7c-b59b-4267-a9da-8fbd14f25154 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1894.396095] env[62619]: INFO nova.compute.manager [req-b3649920-f4a7-4d55-9bee-f2c2b3f0c77b req-6340c35e-173d-4855-a157-389b1fd632df service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Neutron deleted interface e4b11f7c-b59b-4267-a9da-8fbd14f25154; detaching it from the instance and deleting it from the info cache [ 1894.396383] env[62619]: DEBUG nova.network.neutron [req-b3649920-f4a7-4d55-9bee-f2c2b3f0c77b req-6340c35e-173d-4855-a157-389b1fd632df service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Updating instance_info_cache with network_info: [{"id": "0364c1be-595c-4984-9173-39fd5163c9ad", "address": "fa:16:3e:d1:3c:33", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0364c1be-59", "ovs_interfaceid": "0364c1be-595c-4984-9173-39fd5163c9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e84ee31b-42f6-44bd-be17-381a0796e37b", "address": "fa:16:3e:e8:f1:56", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": 
[], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84ee31b-42", "ovs_interfaceid": "e84ee31b-42f6-44bd-be17-381a0796e37b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.546061] env[62619]: DEBUG nova.scheduler.client.report [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1894.589476] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d96b3545-66e1-4aa3-954e-54a03f03aebc tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "52b29fef-eab6-4541-a570-af9c0c021a75" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.249s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.656416] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a360043-aa35-47f6-a35a-51f46e9c795b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.676573] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2b860f-d533-4206-848b-13a139dc7f7a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.684394] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating instance '21d9fc7a-228e-4b33-8534-55285d4e6e96' progress to 83 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1894.898688] env[62619]: DEBUG oslo_concurrency.lockutils [req-b3649920-f4a7-4d55-9bee-f2c2b3f0c77b req-6340c35e-173d-4855-a157-389b1fd632df service nova] Acquiring lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.899475] env[62619]: DEBUG 
oslo_concurrency.lockutils [req-b3649920-f4a7-4d55-9bee-f2c2b3f0c77b req-6340c35e-173d-4855-a157-389b1fd632df service nova] Acquired lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1894.899858] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6a97ac-80ed-451d-a158-2c30e017abbe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.920048] env[62619]: DEBUG oslo_concurrency.lockutils [req-b3649920-f4a7-4d55-9bee-f2c2b3f0c77b req-6340c35e-173d-4855-a157-389b1fd632df service nova] Releasing lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.920180] env[62619]: WARNING nova.compute.manager [req-b3649920-f4a7-4d55-9bee-f2c2b3f0c77b req-6340c35e-173d-4855-a157-389b1fd632df service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Detach interface failed, port_id=e4b11f7c-b59b-4267-a9da-8fbd14f25154, reason: No device with interface-id e4b11f7c-b59b-4267-a9da-8fbd14f25154 exists on VM: nova.exception.NotFound: No device with interface-id e4b11f7c-b59b-4267-a9da-8fbd14f25154 exists on VM [ 1895.051079] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.279s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.051339] env[62619]: INFO nova.compute.manager [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Migrating [ 1895.189750] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1895.190303] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98f78990-5642-463a-b9dd-32acd26690a6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.201135] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1895.201135] env[62619]: value = "task-1778542" [ 1895.201135] env[62619]: _type = "Task" [ 1895.201135] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.210110] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778542, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.372529] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.372724] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.372906] env[62619]: DEBUG nova.network.neutron [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1895.427592] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.427592] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.427592] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.428141] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.428141] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.430114] env[62619]: INFO nova.compute.manager [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Terminating instance [ 1895.566794] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.566986] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.567175] env[62619]: DEBUG nova.network.neutron [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1895.697116] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "52b29fef-eab6-4541-a570-af9c0c021a75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.697358] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "52b29fef-eab6-4541-a570-af9c0c021a75" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.697567] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "52b29fef-eab6-4541-a570-af9c0c021a75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.697745] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "52b29fef-eab6-4541-a570-af9c0c021a75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.698019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 
tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "52b29fef-eab6-4541-a570-af9c0c021a75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.700123] env[62619]: INFO nova.compute.manager [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Terminating instance [ 1895.713036] env[62619]: DEBUG oslo_vmware.api [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778542, 'name': PowerOnVM_Task, 'duration_secs': 0.42006} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.713036] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1895.713176] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2ab03f-e3c3-4c59-9f60-23a69049c548 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating instance '21d9fc7a-228e-4b33-8534-55285d4e6e96' progress to 100 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1895.934161] env[62619]: DEBUG nova.compute.manager [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1895.934396] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1895.935288] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4679fdca-583e-40c9-ae95-5a1733bd85e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.946026] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1895.946146] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c46dc9f-fff7-40a7-8008-ddba5f5eb77d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.952440] env[62619]: DEBUG oslo_vmware.api [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1895.952440] env[62619]: value = "task-1778543" [ 1895.952440] env[62619]: _type = "Task" [ 1895.952440] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.961526] env[62619]: DEBUG oslo_vmware.api [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778543, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.125525] env[62619]: INFO nova.network.neutron [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Port e84ee31b-42f6-44bd-be17-381a0796e37b from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
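Aside: the PowerOnVM_Task entries above show the usual oslo.vmware pattern of invoking an asynchronous vCenter task and polling it until it completes (the wait_for_task / _poll_task frames in the log). A minimal sketch of that pattern follows; the vCenter host, credentials, and poll settings are placeholders, not values taken from this log.

    # Sketch of the invoke-then-poll task pattern recorded above.
    # Assumes oslo.vmware is installed and a reachable vCenter; the
    # connection parameters below are placeholders.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test',                  # placeholder vCenter host
        'administrator@vsphere.local',      # placeholder user
        'secret',                           # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)             # same role as the _poll_task loop

    # Look up a VM by instance UUID, comparable to the
    # SearchIndex.FindAllByUuid call seen later in this log.
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid='8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e',
        vmSearch=True, instanceUuid=True)

    if vm_refs:
        # Start the asynchronous task and block until vCenter reports
        # success, which is what PowerOnVM_Task + wait_for_task record.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_refs[0])
        session.wait_for_task(task)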
[ 1896.125906] env[62619]: DEBUG nova.network.neutron [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Updating instance_info_cache with network_info: [{"id": "0364c1be-595c-4984-9173-39fd5163c9ad", "address": "fa:16:3e:d1:3c:33", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0364c1be-59", "ovs_interfaceid": "0364c1be-595c-4984-9173-39fd5163c9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.206601] env[62619]: DEBUG nova.compute.manager [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1896.206842] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1896.208684] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21af64cb-b98a-4568-a0f0-ed1648b5cd33 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.217393] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1896.217705] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a75e71d2-b56e-48e6-ad40-e8a3cd05dc24 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.225479] env[62619]: DEBUG oslo_vmware.api [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1896.225479] env[62619]: value = "task-1778544" [ 1896.225479] env[62619]: _type = "Task" [ 1896.225479] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.234056] env[62619]: DEBUG oslo_vmware.api [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778544, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.287257] env[62619]: DEBUG nova.network.neutron [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Updating instance_info_cache with network_info: [{"id": "ac4a39b7-453e-42d2-93cf-fa1d7e2640ee", "address": "fa:16:3e:d8:d1:15", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac4a39b7-45", "ovs_interfaceid": "ac4a39b7-453e-42d2-93cf-fa1d7e2640ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.462463] env[62619]: DEBUG oslo_vmware.api [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778543, 'name': PowerOffVM_Task, 'duration_secs': 0.306158} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.462773] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1896.462943] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1896.463344] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9f50643-4957-4aec-852d-6b5595debcf6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.505456] env[62619]: DEBUG nova.compute.manager [req-99aa9b77-59ea-4a7e-9410-a01d85f61349 req-42d7e937-779b-4296-8263-90c350470fa1 service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Received event network-vif-deleted-e84ee31b-42f6-44bd-be17-381a0796e37b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1896.579054] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1896.579054] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1896.579054] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Deleting the datastore file [datastore1] 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1896.579330] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-671a4edc-d4bd-4061-a79b-cce5009a1802 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.586142] env[62619]: DEBUG oslo_vmware.api [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1896.586142] env[62619]: value = "task-1778546" [ 1896.586142] env[62619]: _type = "Task" [ 1896.586142] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.594323] env[62619]: DEBUG oslo_vmware.api [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778546, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.629200] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "refresh_cache-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.736977] env[62619]: DEBUG oslo_vmware.api [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778544, 'name': PowerOffVM_Task, 'duration_secs': 0.288716} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.737362] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1896.737608] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1896.737932] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50039def-f796-45b6-b5ab-24deaa9078af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.790707] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.816298] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1896.816619] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1896.816870] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 
tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Deleting the datastore file [datastore1] 52b29fef-eab6-4541-a570-af9c0c021a75 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1896.817207] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f12d710-8bf2-49fc-ac50-ed10e4a52bc6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.823365] env[62619]: DEBUG oslo_vmware.api [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1896.823365] env[62619]: value = "task-1778548" [ 1896.823365] env[62619]: _type = "Task" [ 1896.823365] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.831088] env[62619]: DEBUG oslo_vmware.api [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778548, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.100030] env[62619]: DEBUG oslo_vmware.api [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15431} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.100030] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1897.100247] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1897.100474] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1897.100704] env[62619]: INFO nova.compute.manager [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1897.101016] env[62619]: DEBUG oslo.service.loopingcall [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1897.101249] env[62619]: DEBUG nova.compute.manager [-] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1897.101370] env[62619]: DEBUG nova.network.neutron [-] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1897.133242] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1fa947c5-dea4-4add-b262-f44f92796bd7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e-e4b11f7c-b59b-4267-a9da-8fbd14f25154" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.582s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.176544] env[62619]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port e84ee31b-42f6-44bd-be17-381a0796e37b could not be found.", "detail": ""}} {{(pid=62619) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1897.176785] env[62619]: DEBUG nova.network.neutron [-] Unable to show port e84ee31b-42f6-44bd-be17-381a0796e37b as it no longer exists. {{(pid=62619) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1897.335142] env[62619]: DEBUG oslo_vmware.api [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778548, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12896} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.335351] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1897.335533] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1897.335724] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1897.335870] env[62619]: INFO nova.compute.manager [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Took 1.13 seconds to destroy the instance on the hypervisor. 
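Aside: the PortNotFound entry above ("Unable to show port ... as it no longer exists") shows Nova tolerating a Neutron port that was already deleted while unbinding. A small python-neutronclient sketch of that tolerate-missing-port pattern follows; the Keystone auth parameters are placeholders.

    # Sketch of tolerating an already-deleted Neutron port, as in the
    # PortNotFound / "no longer exists" entries above.
    from keystoneauth1.identity import v3
    from keystoneauth1 import session as ks_session
    from neutronclient.common import exceptions as neutron_exc
    from neutronclient.v2_0 import client as neutron_client

    auth = v3.Password(auth_url='http://controller/identity/v3',  # placeholder
                       username='nova', password='secret',
                       project_name='service',
                       user_domain_id='default',
                       project_domain_id='default')
    neutron = neutron_client.Client(session=ks_session.Session(auth=auth))

    port_id = 'e84ee31b-42f6-44bd-be17-381a0796e37b'
    try:
        neutron.delete_port(port_id)
    except neutron_exc.PortNotFoundClient:
        # The port is already gone; treat it as successfully unbound,
        # mirroring how the log simply notes the port no longer exists.
        pass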
[ 1897.336135] env[62619]: DEBUG oslo.service.loopingcall [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1897.336326] env[62619]: DEBUG nova.compute.manager [-] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1897.336421] env[62619]: DEBUG nova.network.neutron [-] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1897.706039] env[62619]: DEBUG nova.compute.manager [req-abaecae0-7be0-44bd-b3da-556162cc184a req-bf636c66-41bf-4b35-a143-4ce32cbd7ead service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Received event network-vif-deleted-0364c1be-595c-4984-9173-39fd5163c9ad {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1897.706335] env[62619]: INFO nova.compute.manager [req-abaecae0-7be0-44bd-b3da-556162cc184a req-bf636c66-41bf-4b35-a143-4ce32cbd7ead service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Neutron deleted interface 0364c1be-595c-4984-9173-39fd5163c9ad; detaching it from the instance and deleting it from the info cache [ 1897.706539] env[62619]: DEBUG nova.network.neutron [req-abaecae0-7be0-44bd-b3da-556162cc184a req-bf636c66-41bf-4b35-a143-4ce32cbd7ead service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.060268] env[62619]: DEBUG nova.network.neutron [-] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.209703] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-590151a3-4d9f-4e68-972e-e2f5e37d3a7b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.221574] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35453ca7-b9c0-4b51-8631-c5e6960d5bab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.257330] env[62619]: DEBUG nova.compute.manager [req-abaecae0-7be0-44bd-b3da-556162cc184a req-bf636c66-41bf-4b35-a143-4ce32cbd7ead service nova] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Detach interface failed, port_id=0364c1be-595c-4984-9173-39fd5163c9ad, reason: Instance 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1898.297430] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "21d9fc7a-228e-4b33-8534-55285d4e6e96" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.297690] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "21d9fc7a-228e-4b33-8534-55285d4e6e96" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.297875] env[62619]: DEBUG nova.compute.manager [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Going to confirm migration 7 {{(pid=62619) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5157}} [ 1898.307808] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-340d985b-2707-4c6c-8d13-5151ea04845b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.327178] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Updating instance '8ed2e4af-b484-4cd5-89c0-6ba60188127a' progress to 0 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1898.359212] env[62619]: DEBUG nova.network.neutron [-] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.529074] env[62619]: DEBUG nova.compute.manager [req-e753c0b1-ffa5-4dbf-91e7-352590a250c0 req-33425249-ee61-4a71-9172-55726830d2d5 service nova] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Received event network-vif-deleted-165f6c4a-b24e-4c32-845f-891bf7478563 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1898.561062] env[62619]: INFO nova.compute.manager [-] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Took 1.46 seconds to deallocate network for instance. 
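Aside: the recurring "Acquiring lock ... / acquired ... waited / released ... held" entries come from oslo.concurrency's lockutils wrapper (the inner frames in lockutils.py), which serializes work on a named lock and logs the wait and hold times. A minimal sketch of the two usual forms, with illustrative lock names:

    # Sketch of the oslo.concurrency locking pattern behind the
    # "Acquiring lock ... / acquired ... / released ..." entries.
    from oslo_concurrency import lockutils

    # Decorator form: serializes callers on the same lock name, as the
    # resource tracker does for "compute_resources".
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # placeholder body

    # Context-manager form: per-instance locks such as
    # "refresh_cache-<uuid>" are taken the same way.
    with lockutils.lock('refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a'):
        update_usage()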
[ 1898.833839] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1898.835060] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1898.835230] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquired lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1898.835400] env[62619]: DEBUG nova.network.neutron [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1898.835572] env[62619]: DEBUG nova.objects.instance [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lazy-loading 'info_cache' on Instance uuid 21d9fc7a-228e-4b33-8534-55285d4e6e96 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1898.837147] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3b8ab3d-10df-4407-b51c-d4ef4f5e4372 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.845346] env[62619]: DEBUG oslo_vmware.api [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1898.845346] env[62619]: value = "task-1778549" [ 1898.845346] env[62619]: _type = "Task" [ 1898.845346] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.854550] env[62619]: DEBUG oslo_vmware.api [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778549, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.863464] env[62619]: INFO nova.compute.manager [-] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Took 1.53 seconds to deallocate network for instance. 
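Aside: the "Lazy-loading 'info_cache' on Instance uuid ..." entries above come from Nova's versioned objects fetching unset fields on first access (obj_load_attr). The sketch below is a generic plain-Python illustration of that load-on-first-access idea, not Nova's actual object code.

    # Generic illustration of load-on-first-access, the idea behind the
    # "Lazy-loading '<field>' on Instance uuid ..." entries.
    class LazyInstance:
        def __init__(self, uuid, loader):
            self._uuid = uuid
            self._loader = loader        # callable(uuid, field) -> value

        def __getattr__(self, name):
            # Reached only when normal lookup fails, i.e. the field has
            # not been loaded yet.
            if name.startswith('_'):
                raise AttributeError(name)
            print(f"Lazy-loading {name!r} on Instance uuid {self._uuid}")
            value = self._loader(self._uuid, name)
            setattr(self, name, value)   # cache so later access is direct
            return value


    inst = LazyInstance('21d9fc7a-228e-4b33-8534-55285d4e6e96',
                        lambda uuid, field: {})   # stand-in loader
    inst.info_cache    # triggers the lazy load
    inst.info_cache    # served from the cached attribute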
[ 1899.068456] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.068821] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.069091] env[62619]: DEBUG nova.objects.instance [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'resources' on Instance uuid 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1899.355838] env[62619]: DEBUG oslo_vmware.api [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778549, 'name': PowerOffVM_Task, 'duration_secs': 0.216722} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.356143] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1899.356337] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Updating instance '8ed2e4af-b484-4cd5-89c0-6ba60188127a' progress to 17 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1899.369830] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.740722] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a9be06-60f8-4fa1-93ec-223d515ca346 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.748379] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea307cd-a49e-4292-9f3b-68c212cacabe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.452631] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Getting desirable 
topologies for flavor Flavor(created_at=2024-12-11T22:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1900.452932] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1900.453028] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1900.453211] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1900.453352] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1900.453494] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1900.453689] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1900.453843] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1900.453999] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1900.454169] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1900.454333] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1900.459745] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1a5aa08-523c-49f0-b1d8-3a18269b3411 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.469912] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d049c9-4f25-4038-aa25-3ddaada2c6dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.479380] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8d274d-05d2-448a-8d7d-e41110653bb9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.482959] env[62619]: DEBUG oslo_vmware.api [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1900.482959] env[62619]: value = "task-1778550" [ 1900.482959] env[62619]: _type = "Task" [ 1900.482959] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.493568] env[62619]: DEBUG nova.compute.provider_tree [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1900.497920] env[62619]: DEBUG oslo_vmware.api [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778550, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.682853] env[62619]: DEBUG nova.network.neutron [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating instance_info_cache with network_info: [{"id": "ce3520ea-f75e-4d6a-a27f-de90d6383823", "address": "fa:16:3e:90:45:ed", "network": {"id": "052072b9-07fa-4604-8b64-b0ab3fb58510", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-943251436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c24c9d49d8d4104a0868f126eb3a26e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce3520ea-f7", "ovs_interfaceid": "ce3520ea-f75e-4d6a-a27f-de90d6383823", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1900.993065] env[62619]: DEBUG oslo_vmware.api [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778550, 'name': ReconfigVM_Task, 'duration_secs': 0.160244} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.993065] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Updating instance '8ed2e4af-b484-4cd5-89c0-6ba60188127a' progress to 33 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1900.999436] env[62619]: DEBUG nova.scheduler.client.report [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1901.185813] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Releasing lock "refresh_cache-21d9fc7a-228e-4b33-8534-55285d4e6e96" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1901.186130] env[62619]: DEBUG nova.objects.instance [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lazy-loading 'migration_context' on Instance uuid 21d9fc7a-228e-4b33-8534-55285d4e6e96 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1901.499450] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1901.499785] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1901.499915] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1901.500066] 
env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1901.500215] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1901.500356] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1901.500556] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1901.500709] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1901.500874] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1901.501108] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1901.501311] env[62619]: DEBUG nova.virt.hardware [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1901.506573] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1901.507235] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.439s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1901.509237] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-930e73c2-5257-4de7-9e76-89c2b52f78a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.522227] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.152s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.522401] env[62619]: DEBUG nova.objects.instance [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lazy-loading 'resources' on Instance uuid 52b29fef-eab6-4541-a570-af9c0c021a75 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1901.529511] env[62619]: DEBUG oslo_vmware.api [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1901.529511] env[62619]: value = "task-1778551" [ 1901.529511] env[62619]: _type = "Task" [ 1901.529511] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.539083] env[62619]: DEBUG oslo_vmware.api [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778551, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.539930] env[62619]: INFO nova.scheduler.client.report [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Deleted allocations for instance 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e [ 1901.690060] env[62619]: DEBUG nova.objects.base [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Object Instance<21d9fc7a-228e-4b33-8534-55285d4e6e96> lazy-loaded attributes: info_cache,migration_context {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1901.690458] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa841d3c-f8bd-48d9-9516-8e693dd06562 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.708422] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-713d31be-1e02-4dcc-86fc-5a6955780487 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.713623] env[62619]: DEBUG oslo_vmware.api [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1901.713623] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52108740-8f48-d353-fae9-f28aec7a96fe" [ 1901.713623] env[62619]: _type = "Task" [ 1901.713623] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.721190] env[62619]: DEBUG oslo_vmware.api [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52108740-8f48-d353-fae9-f28aec7a96fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.039917] env[62619]: DEBUG oslo_vmware.api [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778551, 'name': ReconfigVM_Task, 'duration_secs': 0.161549} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.040224] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1902.040970] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3671c29f-32d8-4064-8cd2-2e4e0c69858d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.047881] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6d4974ea-88c7-4dc8-b5b6-0069091514b7 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.620s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.067108] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 8ed2e4af-b484-4cd5-89c0-6ba60188127a/8ed2e4af-b484-4cd5-89c0-6ba60188127a.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1902.070186] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc50a031-ed47-4154-aeb1-70bce69dcb38 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.088385] env[62619]: DEBUG oslo_vmware.api [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1902.088385] env[62619]: value = "task-1778552" [ 1902.088385] env[62619]: _type = "Task" [ 1902.088385] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.098474] env[62619]: DEBUG oslo_vmware.api [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778552, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.211404] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40266a85-23bb-4251-8de5-301ac1dbce13 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.225243] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca16d10-0240-46fe-8cca-8f6ac94fac76 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.228277] env[62619]: DEBUG oslo_vmware.api [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52108740-8f48-d353-fae9-f28aec7a96fe, 'name': SearchDatastore_Task, 'duration_secs': 0.006899} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.228595] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.255599] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ce89e0-ffa9-48ea-b87d-d7119ac5aaf0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.262322] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb144eba-d4eb-4e07-94fd-47f213a1bd96 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.275114] env[62619]: DEBUG nova.compute.provider_tree [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1902.598487] env[62619]: DEBUG oslo_vmware.api [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778552, 'name': ReconfigVM_Task, 'duration_secs': 0.250892} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.598845] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 8ed2e4af-b484-4cd5-89c0-6ba60188127a/8ed2e4af-b484-4cd5-89c0-6ba60188127a.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1902.599382] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Updating instance '8ed2e4af-b484-4cd5-89c0-6ba60188127a' progress to 50 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1902.778125] env[62619]: DEBUG nova.scheduler.client.report [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1903.105480] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012ab0a2-b19a-42e5-ac91-679ab5d81e0b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.125684] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-768c8e0b-fe56-43f1-81d8-4cf61493adfd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.144503] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Updating instance '8ed2e4af-b484-4cd5-89c0-6ba60188127a' progress to 67 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1903.283126] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.761s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.285468] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.057s {{(pid=62619) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.311097] env[62619]: INFO nova.scheduler.client.report [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Deleted allocations for instance 52b29fef-eab6-4541-a570-af9c0c021a75 [ 1903.723706] env[62619]: DEBUG nova.network.neutron [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Port ac4a39b7-453e-42d2-93cf-fa1d7e2640ee binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1903.819827] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7978ec31-d5bf-4619-89d0-d014d1ee9692 tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "52b29fef-eab6-4541-a570-af9c0c021a75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.122s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.825125] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "a0953370-77f2-4e3b-a92e-cb12b3a82361" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1903.825356] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.826380] env[62619]: INFO nova.compute.manager [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Shelving [ 1903.934445] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983ee808-1879-4243-ac25-00fd0f156e69 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.942696] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9db35a-e580-4265-9817-b74ef32e8789 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.988717] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d57bae71-d75d-412a-900f-7d070d638ad5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.997718] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79215b9-6d84-45be-8dd5-fca1725c9486 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.014139] 
env[62619]: DEBUG nova.compute.provider_tree [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1904.518447] env[62619]: DEBUG nova.scheduler.client.report [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1904.661046] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.661348] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.744465] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.744788] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.744859] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.833921] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1904.834384] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53eb58f4-9292-4c20-98ae-4e858feb4e7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.842318] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1904.842318] env[62619]: value = "task-1778554" [ 1904.842318] env[62619]: _type = "Task" [ 1904.842318] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.850271] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778554, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.163539] env[62619]: DEBUG nova.compute.manager [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1905.352528] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778554, 'name': PowerOffVM_Task, 'duration_secs': 0.208491} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.352739] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1905.353515] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9be7ff-4839-43d7-98d9-3882a235f85c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.371870] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738a4fa2-f19a-45a8-9e85-51193bb30e12 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.530148] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.245s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.686534] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1905.686837] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.688497] env[62619]: INFO nova.compute.claims [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1905.778734] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.778960] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.779117] env[62619]: DEBUG nova.network.neutron [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 
tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1905.881848] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1905.882202] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-da2e4333-8874-4035-9cf0-2ee909020aa4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.890783] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1905.890783] env[62619]: value = "task-1778555" [ 1905.890783] env[62619]: _type = "Task" [ 1905.890783] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.899321] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778555, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.067903] env[62619]: INFO nova.compute.manager [None req-77eb30de-d153-4a4e-bb4a-6ee3754d5b20 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Get console output [ 1906.068280] env[62619]: WARNING nova.virt.vmwareapi.driver [None req-77eb30de-d153-4a4e-bb4a-6ee3754d5b20 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] The console log is missing. Check your VSPC configuration [ 1906.086091] env[62619]: INFO nova.scheduler.client.report [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleted allocation for migration 10114fb1-8a4f-4bb5-b620-f74b7b500b7d [ 1906.400122] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778555, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.500696] env[62619]: DEBUG nova.network.neutron [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Updating instance_info_cache with network_info: [{"id": "ac4a39b7-453e-42d2-93cf-fa1d7e2640ee", "address": "fa:16:3e:d8:d1:15", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac4a39b7-45", "ovs_interfaceid": "ac4a39b7-453e-42d2-93cf-fa1d7e2640ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1906.592063] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5cbbccf0-193b-46ff-a92c-d9442ffdae7e tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "21d9fc7a-228e-4b33-8534-55285d4e6e96" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.294s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.844521] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db23c647-e8e9-46be-9d5f-9b1e69cb7ad6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.851961] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bba5ab-0684-4338-ae2d-bd69c5df592d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.882016] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a4966ba-95f0-4221-bb9c-35e049618196 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.889178] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b03272-14d7-4a73-bc62-eac30aaead49 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.904489] env[62619]: DEBUG nova.compute.provider_tree [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1906.908594] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778555, 'name': CreateSnapshot_Task, 'duration_secs': 0.585174} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.909096] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1906.909823] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d95f63-aef7-4f45-a0d0-01cc4b6021bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.003585] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.410332] env[62619]: DEBUG nova.scheduler.client.report [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1907.426902] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1907.427738] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b441856d-ca76-4445-9e1f-1dec5616ca70 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.437899] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1907.437899] env[62619]: value = "task-1778556" [ 1907.437899] env[62619]: _type = "Task" [ 1907.437899] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.446662] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778556, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.526301] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9ac68c-6029-4d4f-8491-e2bbcf9c1c61 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.545902] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faea0777-218b-4ba4-886d-3d74456b6ac8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.553152] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Updating instance '8ed2e4af-b484-4cd5-89c0-6ba60188127a' progress to 83 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1907.915897] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1907.916605] env[62619]: DEBUG nova.compute.manager [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1907.949780] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778556, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.059061] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1908.059324] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c67e45c6-c81d-4e03-b31d-d486d73830a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.066124] env[62619]: DEBUG oslo_vmware.api [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1908.066124] env[62619]: value = "task-1778557" [ 1908.066124] env[62619]: _type = "Task" [ 1908.066124] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.074042] env[62619]: DEBUG oslo_vmware.api [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778557, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.423144] env[62619]: DEBUG nova.compute.utils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1908.425802] env[62619]: DEBUG nova.compute.manager [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1908.426054] env[62619]: DEBUG nova.network.neutron [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1908.450212] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778556, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.482786] env[62619]: DEBUG nova.policy [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8d937f303584c3daea133a6283fd5a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23d77e73a09d492695fbfe6ac2c93371', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1908.576047] env[62619]: DEBUG oslo_vmware.api [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778557, 'name': PowerOnVM_Task, 'duration_secs': 0.429194} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.576177] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1908.576318] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-67a63d28-5bcf-4f2e-88ba-3408241bb619 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Updating instance '8ed2e4af-b484-4cd5-89c0-6ba60188127a' progress to 100 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1908.816712] env[62619]: DEBUG nova.network.neutron [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Successfully created port: 35375282-c697-45eb-a87c-d85555a9012b {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1908.929915] env[62619]: DEBUG nova.compute.manager [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1908.961065] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778556, 'name': CloneVM_Task, 'duration_secs': 1.130273} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.961474] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Created linked-clone VM from snapshot [ 1908.962328] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7986656-9460-49d6-b8ae-cd6c79eaa9a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.972127] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Uploading image 8f8142ab-6142-4a84-93a9-2b4bd7e31086 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1908.999878] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1908.999878] env[62619]: value = "vm-369178" [ 1908.999878] env[62619]: _type = "VirtualMachine" [ 1908.999878] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1909.000379] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2c1f3e09-1e22-4398-be5a-5e9007719172 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.008470] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lease: (returnval){ [ 1909.008470] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525fc303-ee3e-4246-be0d-2646626ad458" [ 1909.008470] env[62619]: _type = "HttpNfcLease" [ 1909.008470] env[62619]: } obtained for exporting VM: (result){ [ 1909.008470] env[62619]: value = "vm-369178" [ 1909.008470] env[62619]: _type = "VirtualMachine" [ 1909.008470] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1909.008865] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the lease: (returnval){ [ 1909.008865] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525fc303-ee3e-4246-be0d-2646626ad458" [ 1909.008865] env[62619]: _type = "HttpNfcLease" [ 1909.008865] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1909.015974] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1909.015974] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525fc303-ee3e-4246-be0d-2646626ad458" [ 1909.015974] env[62619]: _type = "HttpNfcLease" [ 1909.015974] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1909.119141] env[62619]: DEBUG oslo_concurrency.lockutils [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "e302e431-1f95-4ab5-bfca-59450fd887f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1909.119328] env[62619]: DEBUG oslo_concurrency.lockutils [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "e302e431-1f95-4ab5-bfca-59450fd887f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.119541] env[62619]: DEBUG oslo_concurrency.lockutils [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "e302e431-1f95-4ab5-bfca-59450fd887f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1909.119729] env[62619]: DEBUG oslo_concurrency.lockutils [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "e302e431-1f95-4ab5-bfca-59450fd887f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.119897] env[62619]: DEBUG oslo_concurrency.lockutils [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "e302e431-1f95-4ab5-bfca-59450fd887f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1909.122151] env[62619]: INFO nova.compute.manager [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Terminating instance [ 1909.516990] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1909.516990] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525fc303-ee3e-4246-be0d-2646626ad458" [ 1909.516990] env[62619]: _type = "HttpNfcLease" [ 1909.516990] env[62619]: } is ready. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1909.517295] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1909.517295] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525fc303-ee3e-4246-be0d-2646626ad458" [ 1909.517295] env[62619]: _type = "HttpNfcLease" [ 1909.517295] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1909.517982] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f59787-7618-4990-9219-32b07e203ae7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.525136] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5214b389-ef5a-b235-44e0-9bd1076434d4/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1909.525309] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5214b389-ef5a-b235-44e0-9bd1076434d4/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1909.610900] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-53efa87b-174a-421f-84b9-b8a547c32554 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.626646] env[62619]: DEBUG nova.compute.manager [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1909.627015] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1909.628121] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a34073e-c7c4-478c-9ba5-a212aad4482e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.638813] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1909.643528] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-611c407a-3b90-4dc4-8a5c-fe494f1b5c84 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.650546] env[62619]: DEBUG oslo_vmware.api [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1909.650546] env[62619]: value = "task-1778559" [ 1909.650546] env[62619]: _type = "Task" [ 1909.650546] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.658511] env[62619]: DEBUG oslo_vmware.api [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778559, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.941014] env[62619]: DEBUG nova.compute.manager [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1909.965290] env[62619]: DEBUG nova.virt.hardware [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1909.966089] env[62619]: DEBUG nova.virt.hardware [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1909.966265] env[62619]: DEBUG nova.virt.hardware [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1909.966461] env[62619]: DEBUG nova.virt.hardware [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1909.966665] env[62619]: DEBUG nova.virt.hardware [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1909.966825] env[62619]: DEBUG nova.virt.hardware [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1909.967062] env[62619]: DEBUG nova.virt.hardware [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1909.967236] env[62619]: DEBUG nova.virt.hardware [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1909.967593] env[62619]: DEBUG 
nova.virt.hardware [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1909.967857] env[62619]: DEBUG nova.virt.hardware [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1909.968156] env[62619]: DEBUG nova.virt.hardware [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1909.969121] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb81d017-8718-4de3-947e-4039d58f0315 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.978996] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a775a692-b97c-4e9d-838f-c2fc4b616670 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.161148] env[62619]: DEBUG oslo_vmware.api [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778559, 'name': PowerOffVM_Task, 'duration_secs': 0.261464} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.161546] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1910.161764] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1910.162112] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17173522-16bd-40d7-914a-f82213f68344 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.383450] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1910.384174] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1910.384174] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Deleting the datastore file [datastore1] e302e431-1f95-4ab5-bfca-59450fd887f0 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1910.384411] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32ce089e-b77f-4a3b-88d5-2d93f6abf058 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.391799] env[62619]: DEBUG oslo_vmware.api [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1910.391799] env[62619]: value = "task-1778561" [ 1910.391799] env[62619]: _type = "Task" [ 1910.391799] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.401460] env[62619]: DEBUG oslo_vmware.api [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778561, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.615797] env[62619]: DEBUG nova.compute.manager [req-258f54bd-c6be-49c5-b2ba-a3845ce991a8 req-ffb16272-b351-467d-b800-736e77b6d393 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Received event network-vif-plugged-35375282-c697-45eb-a87c-d85555a9012b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1910.615971] env[62619]: DEBUG oslo_concurrency.lockutils [req-258f54bd-c6be-49c5-b2ba-a3845ce991a8 req-ffb16272-b351-467d-b800-736e77b6d393 service nova] Acquiring lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.616224] env[62619]: DEBUG oslo_concurrency.lockutils [req-258f54bd-c6be-49c5-b2ba-a3845ce991a8 req-ffb16272-b351-467d-b800-736e77b6d393 service nova] Lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.616356] env[62619]: DEBUG oslo_concurrency.lockutils [req-258f54bd-c6be-49c5-b2ba-a3845ce991a8 req-ffb16272-b351-467d-b800-736e77b6d393 service nova] Lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.617238] env[62619]: DEBUG nova.compute.manager [req-258f54bd-c6be-49c5-b2ba-a3845ce991a8 req-ffb16272-b351-467d-b800-736e77b6d393 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] No waiting events found dispatching network-vif-plugged-35375282-c697-45eb-a87c-d85555a9012b {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1910.617238] env[62619]: WARNING nova.compute.manager [req-258f54bd-c6be-49c5-b2ba-a3845ce991a8 req-ffb16272-b351-467d-b800-736e77b6d393 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Received unexpected event network-vif-plugged-35375282-c697-45eb-a87c-d85555a9012b for instance with vm_state building and task_state spawning. [ 1910.903361] env[62619]: DEBUG oslo_vmware.api [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778561, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.292092} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.903660] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1910.903870] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1910.904070] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1910.904347] env[62619]: INFO nova.compute.manager [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1910.904632] env[62619]: DEBUG oslo.service.loopingcall [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1910.904874] env[62619]: DEBUG nova.compute.manager [-] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1910.905030] env[62619]: DEBUG nova.network.neutron [-] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1911.196641] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.197079] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.197325] env[62619]: DEBUG nova.compute.manager [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Going to confirm migration 8 {{(pid=62619) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5157}} [ 1911.204075] env[62619]: DEBUG nova.compute.manager [req-ce75336e-6826-4ddd-92ab-005e125470a9 req-0db7f155-e51b-4fc8-a3ae-acc18c1028e0 service nova] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Received event network-vif-deleted-279a158f-38d3-41bb-ab72-22a80ceca030 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1911.204408] env[62619]: INFO nova.compute.manager [req-ce75336e-6826-4ddd-92ab-005e125470a9 req-0db7f155-e51b-4fc8-a3ae-acc18c1028e0 service nova] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Neutron deleted interface 279a158f-38d3-41bb-ab72-22a80ceca030; detaching it from the instance and deleting it from the info cache [ 1911.204590] env[62619]: DEBUG nova.network.neutron [req-ce75336e-6826-4ddd-92ab-005e125470a9 req-0db7f155-e51b-4fc8-a3ae-acc18c1028e0 service nova] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1911.219263] env[62619]: DEBUG nova.network.neutron [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Successfully updated port: 35375282-c697-45eb-a87c-d85555a9012b {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1911.241145] env[62619]: DEBUG nova.compute.manager [req-5c09135d-d5c5-4d99-9af1-8219244dde30 req-f4ab05d5-18f2-4bba-976f-30f101478e78 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Received event network-changed-35375282-c697-45eb-a87c-d85555a9012b {{(pid=62619) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11512}} [ 1911.241719] env[62619]: DEBUG nova.compute.manager [req-5c09135d-d5c5-4d99-9af1-8219244dde30 req-f4ab05d5-18f2-4bba-976f-30f101478e78 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Refreshing instance network info cache due to event network-changed-35375282-c697-45eb-a87c-d85555a9012b. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1911.242367] env[62619]: DEBUG oslo_concurrency.lockutils [req-5c09135d-d5c5-4d99-9af1-8219244dde30 req-f4ab05d5-18f2-4bba-976f-30f101478e78 service nova] Acquiring lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1911.242367] env[62619]: DEBUG oslo_concurrency.lockutils [req-5c09135d-d5c5-4d99-9af1-8219244dde30 req-f4ab05d5-18f2-4bba-976f-30f101478e78 service nova] Acquired lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1911.242367] env[62619]: DEBUG nova.network.neutron [req-5c09135d-d5c5-4d99-9af1-8219244dde30 req-f4ab05d5-18f2-4bba-976f-30f101478e78 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Refreshing network info cache for port 35375282-c697-45eb-a87c-d85555a9012b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1911.676684] env[62619]: DEBUG nova.network.neutron [-] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1911.707355] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79686543-7dc6-4e53-9823-ef5e459755e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.717804] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764ebb0e-cb10-4b24-9f34-d4303ff0e834 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.729698] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1911.755797] env[62619]: DEBUG nova.compute.manager [req-ce75336e-6826-4ddd-92ab-005e125470a9 req-0db7f155-e51b-4fc8-a3ae-acc18c1028e0 service nova] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Detach interface failed, port_id=279a158f-38d3-41bb-ab72-22a80ceca030, reason: Instance e302e431-1f95-4ab5-bfca-59450fd887f0 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1911.787561] env[62619]: DEBUG nova.network.neutron [req-5c09135d-d5c5-4d99-9af1-8219244dde30 req-f4ab05d5-18f2-4bba-976f-30f101478e78 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1911.808237] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1911.808417] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquired lock "refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1911.808608] env[62619]: DEBUG nova.network.neutron [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1911.808977] env[62619]: DEBUG nova.objects.instance [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lazy-loading 'info_cache' on Instance uuid 8ed2e4af-b484-4cd5-89c0-6ba60188127a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1911.867886] env[62619]: DEBUG nova.network.neutron [req-5c09135d-d5c5-4d99-9af1-8219244dde30 req-f4ab05d5-18f2-4bba-976f-30f101478e78 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.180359] env[62619]: INFO nova.compute.manager [-] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Took 1.28 seconds to deallocate network for instance. 
[ 1912.370870] env[62619]: DEBUG oslo_concurrency.lockutils [req-5c09135d-d5c5-4d99-9af1-8219244dde30 req-f4ab05d5-18f2-4bba-976f-30f101478e78 service nova] Releasing lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1912.371614] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.371793] env[62619]: DEBUG nova.network.neutron [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1912.687764] env[62619]: DEBUG oslo_concurrency.lockutils [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.688040] env[62619]: DEBUG oslo_concurrency.lockutils [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.688263] env[62619]: DEBUG nova.objects.instance [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lazy-loading 'resources' on Instance uuid e302e431-1f95-4ab5-bfca-59450fd887f0 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1912.856696] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.857024] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.857249] env[62619]: INFO nova.compute.manager [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Shelving [ 1912.916105] env[62619]: DEBUG nova.network.neutron 
[None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1913.023999] env[62619]: DEBUG nova.network.neutron [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Updating instance_info_cache with network_info: [{"id": "ac4a39b7-453e-42d2-93cf-fa1d7e2640ee", "address": "fa:16:3e:d8:d1:15", "network": {"id": "8719791a-28fb-4108-b120-fcdc51c572ea", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1546656839-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2377a52a195d4f0b9181207ab5741734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac4a39b7-45", "ovs_interfaceid": "ac4a39b7-453e-42d2-93cf-fa1d7e2640ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1913.058304] env[62619]: DEBUG nova.network.neutron [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updating instance_info_cache with network_info: [{"id": "35375282-c697-45eb-a87c-d85555a9012b", "address": "fa:16:3e:87:9d:97", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35375282-c6", "ovs_interfaceid": "35375282-c697-45eb-a87c-d85555a9012b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1913.345094] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96bfce6c-9336-4b01-b906-50fca37b50a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.353293] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf579a2-388e-4cc0-a3e2-3ee69de45570 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.384467] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd31737-5590-478a-9ba4-8eb8056deebe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.396111] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce2d5af-2d2f-481f-849d-5ae76a11b92b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.411618] env[62619]: DEBUG nova.compute.provider_tree [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1913.527497] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Releasing lock "refresh_cache-8ed2e4af-b484-4cd5-89c0-6ba60188127a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1913.527717] env[62619]: DEBUG nova.objects.instance [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lazy-loading 'migration_context' on Instance uuid 8ed2e4af-b484-4cd5-89c0-6ba60188127a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1913.561588] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1913.562284] env[62619]: DEBUG nova.compute.manager [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Instance network_info: |[{"id": "35375282-c697-45eb-a87c-d85555a9012b", "address": "fa:16:3e:87:9d:97", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35375282-c6", "ovs_interfaceid": "35375282-c697-45eb-a87c-d85555a9012b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1913.562892] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:9d:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9aa05ef8-c7bb-4af5-983f-bfa0f3f88223', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35375282-c697-45eb-a87c-d85555a9012b', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1913.571985] env[62619]: DEBUG oslo.service.loopingcall [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1913.572558] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1913.572795] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f0e2d8a-12c8-4ccc-a961-bc41fd339bc5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.593400] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1913.593400] env[62619]: value = "task-1778562" [ 1913.593400] env[62619]: _type = "Task" [ 1913.593400] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.601953] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778562, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.891717] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1913.892313] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb5556e9-8325-4147-a36f-ee20e6507b2c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.900338] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1913.900338] env[62619]: value = "task-1778563" [ 1913.900338] env[62619]: _type = "Task" [ 1913.900338] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.908712] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778563, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.914829] env[62619]: DEBUG nova.scheduler.client.report [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1914.030186] env[62619]: DEBUG nova.objects.base [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Object Instance<8ed2e4af-b484-4cd5-89c0-6ba60188127a> lazy-loaded attributes: info_cache,migration_context {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1914.031139] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07e683a-ca04-43d2-a966-85ef2489fb21 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.050922] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db6a50f1-6fa1-4976-b758-40bc4486d47b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.056281] env[62619]: DEBUG oslo_vmware.api [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1914.056281] env[62619]: value = 
"session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525bde3c-2f1f-dac9-93bd-cd77dfd6580c" [ 1914.056281] env[62619]: _type = "Task" [ 1914.056281] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.064839] env[62619]: DEBUG oslo_vmware.api [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525bde3c-2f1f-dac9-93bd-cd77dfd6580c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.103536] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778562, 'name': CreateVM_Task, 'duration_secs': 0.416665} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.103711] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1914.104420] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1914.104609] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.104928] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1914.105208] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1556659-1d94-4c11-9463-44a0b9510875 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.110192] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1914.110192] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525f69e0-75b1-7304-2708-94a092c59764" [ 1914.110192] env[62619]: _type = "Task" [ 1914.110192] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.117977] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525f69e0-75b1-7304-2708-94a092c59764, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.410310] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778563, 'name': PowerOffVM_Task, 'duration_secs': 0.276067} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.410668] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1914.411334] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2334f3b8-5d75-451a-9f0a-9c49c9eeec02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.430784] env[62619]: DEBUG oslo_concurrency.lockutils [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.743s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.433557] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df771fd-a91b-43e1-b517-60079c028a85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.448241] env[62619]: INFO nova.scheduler.client.report [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Deleted allocations for instance e302e431-1f95-4ab5-bfca-59450fd887f0 [ 1914.566296] env[62619]: DEBUG oslo_vmware.api [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525bde3c-2f1f-dac9-93bd-cd77dfd6580c, 'name': SearchDatastore_Task, 'duration_secs': 0.007262} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.566578] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.566840] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.620874] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525f69e0-75b1-7304-2708-94a092c59764, 'name': SearchDatastore_Task, 'duration_secs': 0.01029} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.621166] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1914.621407] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1914.621637] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1914.621779] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.621952] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1914.622230] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-499b7a8c-89c5-4ab8-9bd0-f0608b0b8fee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.630960] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1914.631161] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1914.631885] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-982159e2-e202-49ad-8595-9f2d7d2427b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.637571] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1914.637571] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522ffc38-f439-c07d-eb9b-de249677c81b" [ 1914.637571] env[62619]: _type = "Task" [ 1914.637571] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.645705] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522ffc38-f439-c07d-eb9b-de249677c81b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.944111] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1914.944403] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fa9cbd6c-c466-4b52-84bb-ea71c0f725e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.955925] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1914.955925] env[62619]: value = "task-1778564" [ 1914.955925] env[62619]: _type = "Task" [ 1914.955925] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.956135] env[62619]: DEBUG oslo_concurrency.lockutils [None req-59b4057e-6bb9-49ec-8947-2b590f2a5a4d tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "e302e431-1f95-4ab5-bfca-59450fd887f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.837s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.968171] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778564, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.075171] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "8745aa7f-9848-4320-94b5-08b7e3bccf80" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.075334] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "8745aa7f-9848-4320-94b5-08b7e3bccf80" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.075564] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "8745aa7f-9848-4320-94b5-08b7e3bccf80-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.076306] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "8745aa7f-9848-4320-94b5-08b7e3bccf80-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.077142] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "8745aa7f-9848-4320-94b5-08b7e3bccf80-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.079346] env[62619]: INFO nova.compute.manager [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 
8745aa7f-9848-4320-94b5-08b7e3bccf80] Terminating instance [ 1915.150317] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522ffc38-f439-c07d-eb9b-de249677c81b, 'name': SearchDatastore_Task, 'duration_secs': 0.020695} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.151126] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c0563c1-726e-4205-9e14-f0655940a481 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.158639] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1915.158639] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520b5659-0179-b3cd-40f2-aabffc5413f4" [ 1915.158639] env[62619]: _type = "Task" [ 1915.158639] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.166492] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520b5659-0179-b3cd-40f2-aabffc5413f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.224497] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a591f4ab-a033-4451-b704-dea63e30e00d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.232350] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692909ac-0512-48a9-b631-f74539ecb39c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.263184] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e48d93-ec26-485b-9bc3-09c8c444505d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.271091] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75aec29f-3a1e-410f-bbc9-6170ee917a27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.284577] env[62619]: DEBUG nova.compute.provider_tree [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1915.467931] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778564, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.583110] env[62619]: DEBUG nova.compute.manager [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1915.583342] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1915.584419] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e086fec9-31e7-439a-946b-afca4c4d3087 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.594936] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1915.595294] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b737785-18f2-4621-a8f5-cc05645719c2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.603529] env[62619]: DEBUG oslo_vmware.api [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1915.603529] env[62619]: value = "task-1778565" [ 1915.603529] env[62619]: _type = "Task" [ 1915.603529] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.612830] env[62619]: DEBUG oslo_vmware.api [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778565, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.672424] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520b5659-0179-b3cd-40f2-aabffc5413f4, 'name': SearchDatastore_Task, 'duration_secs': 0.010015} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.672834] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.673229] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a2064f8f-b928-44c0-86d8-c0bb9882dbde/a2064f8f-b928-44c0-86d8-c0bb9882dbde.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1915.673618] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0303d7c6-dc79-4ebe-998f-3c5600f6644f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.682289] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1915.682289] env[62619]: value = "task-1778566" [ 1915.682289] env[62619]: _type = "Task" [ 1915.682289] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.693777] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778566, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.788270] env[62619]: DEBUG nova.scheduler.client.report [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1915.970067] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778564, 'name': CreateSnapshot_Task, 'duration_secs': 0.735156} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.970314] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1915.971082] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd43b2f-7453-406f-aba8-ea50e6cc4e78 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.114847] env[62619]: DEBUG oslo_vmware.api [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778565, 'name': PowerOffVM_Task, 'duration_secs': 0.176558} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.115164] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1916.115400] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1916.115707] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-710b289b-c78e-4964-be89-8f068f7a125d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.193196] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778566, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.205372] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1916.205592] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1916.205771] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Deleting the datastore file [datastore1] 8745aa7f-9848-4320-94b5-08b7e3bccf80 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1916.206050] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1c9d039-88bc-4d6e-a74a-dc41a36c41cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.212552] env[62619]: DEBUG oslo_vmware.api [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for the task: (returnval){ [ 1916.212552] env[62619]: value = "task-1778568" [ 1916.212552] env[62619]: _type = "Task" [ 1916.212552] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.220761] env[62619]: DEBUG oslo_vmware.api [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778568, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.493585] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1916.493953] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-350851b1-5e38-4632-b55c-af523d2e162c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.503210] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1916.503210] env[62619]: value = "task-1778569" [ 1916.503210] env[62619]: _type = "Task" [ 1916.503210] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.511845] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778569, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.694454] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778566, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517065} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.694764] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] a2064f8f-b928-44c0-86d8-c0bb9882dbde/a2064f8f-b928-44c0-86d8-c0bb9882dbde.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1916.694981] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1916.695324] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a908a6ef-e0ce-4881-8377-3e729e73bf2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.702306] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1916.702306] env[62619]: value = "task-1778570" [ 1916.702306] env[62619]: _type = "Task" [ 1916.702306] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.715457] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778570, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.723612] env[62619]: DEBUG oslo_vmware.api [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Task: {'id': task-1778568, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156229} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.723917] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1916.724106] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1916.724278] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1916.724452] env[62619]: INFO nova.compute.manager [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1916.724694] env[62619]: DEBUG oslo.service.loopingcall [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1916.724882] env[62619]: DEBUG nova.compute.manager [-] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1916.724977] env[62619]: DEBUG nova.network.neutron [-] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1916.799393] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.232s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.015127] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778569, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.105178] env[62619]: DEBUG nova.compute.manager [req-49211e89-3b1b-4f17-b7e2-26dc75b9cd78 req-d8c02013-77e6-4f7c-9b71-29a4d3e66803 service nova] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Received event network-vif-deleted-9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1917.105454] env[62619]: INFO nova.compute.manager [req-49211e89-3b1b-4f17-b7e2-26dc75b9cd78 req-d8c02013-77e6-4f7c-9b71-29a4d3e66803 service nova] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Neutron deleted interface 9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01; detaching it from the instance and deleting it from the info cache [ 1917.105563] env[62619]: DEBUG nova.network.neutron [req-49211e89-3b1b-4f17-b7e2-26dc75b9cd78 req-d8c02013-77e6-4f7c-9b71-29a4d3e66803 service nova] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1917.211650] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778570, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074594} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.211922] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1917.212726] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3178a15-b9e1-43e6-8320-23f8b6e36669 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.236244] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] a2064f8f-b928-44c0-86d8-c0bb9882dbde/a2064f8f-b928-44c0-86d8-c0bb9882dbde.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1917.236889] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c62a1624-8e54-432c-a3e6-f3f96ea1b36d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.256869] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1917.256869] env[62619]: value = "task-1778571" [ 1917.256869] env[62619]: _type = "Task" [ 1917.256869] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.264621] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778571, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.275696] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5214b389-ef5a-b235-44e0-9bd1076434d4/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1917.276697] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c6836d-d584-42ca-8a20-bf4a90e52892 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.282612] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5214b389-ef5a-b235-44e0-9bd1076434d4/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1917.282775] env[62619]: ERROR oslo_vmware.rw_handles [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5214b389-ef5a-b235-44e0-9bd1076434d4/disk-0.vmdk due to incomplete transfer. [ 1917.282989] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-fcaf5273-2b5a-4457-aa8a-0c464d3d0bad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.289650] env[62619]: DEBUG oslo_vmware.rw_handles [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5214b389-ef5a-b235-44e0-9bd1076434d4/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1917.289853] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Uploaded image 8f8142ab-6142-4a84-93a9-2b4bd7e31086 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1917.291973] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1917.292219] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-31f95d09-152a-4ffa-8ea1-2b12d65a84ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.297778] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1917.297778] env[62619]: value = "task-1778572" [ 1917.297778] env[62619]: _type = "Task" [ 1917.297778] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.307987] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778572, 'name': Destroy_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.363389] env[62619]: INFO nova.scheduler.client.report [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleted allocation for migration 268e53a3-9ffc-4793-999c-34fa996ac0a8 [ 1917.488471] env[62619]: DEBUG nova.network.neutron [-] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1917.514395] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778569, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.609818] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c87effae-c807-4c25-9016-3a41990a398c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.619686] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ea6ec3-4cc0-46e5-bc0e-459649972926 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.656238] env[62619]: DEBUG nova.compute.manager [req-49211e89-3b1b-4f17-b7e2-26dc75b9cd78 req-d8c02013-77e6-4f7c-9b71-29a4d3e66803 service nova] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Detach interface failed, port_id=9abe90c7-65dc-447a-bc9a-a7fa8f9f3a01, reason: Instance 8745aa7f-9848-4320-94b5-08b7e3bccf80 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1917.767269] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778571, 'name': ReconfigVM_Task, 'duration_secs': 0.392272} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.767604] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Reconfigured VM instance instance-00000071 to attach disk [datastore1] a2064f8f-b928-44c0-86d8-c0bb9882dbde/a2064f8f-b928-44c0-86d8-c0bb9882dbde.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1917.768213] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7906740-184b-49c7-a808-8a90b3d61ca1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.774594] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1917.774594] env[62619]: value = "task-1778573" [ 1917.774594] env[62619]: _type = "Task" [ 1917.774594] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.782415] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778573, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.806693] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778572, 'name': Destroy_Task, 'duration_secs': 0.404536} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.806951] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Destroyed the VM [ 1917.807219] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1917.807456] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d09dd77a-da16-4627-bb91-c0a4eabf43d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.812856] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1917.812856] env[62619]: value = "task-1778574" [ 1917.812856] env[62619]: _type = "Task" [ 1917.812856] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.819912] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778574, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.869368] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.672s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.991484] env[62619]: INFO nova.compute.manager [-] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Took 1.27 seconds to deallocate network for instance. [ 1918.016590] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778569, 'name': CloneVM_Task} progress is 95%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.284376] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778573, 'name': Rename_Task, 'duration_secs': 0.138955} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.284714] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1918.284941] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-130dd9ca-d667-442b-8c1e-6108d18f572c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.291526] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1918.291526] env[62619]: value = "task-1778575" [ 1918.291526] env[62619]: _type = "Task" [ 1918.291526] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.298776] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778575, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.321932] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778574, 'name': RemoveSnapshot_Task, 'duration_secs': 0.34733} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.322228] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1918.322496] env[62619]: DEBUG nova.compute.manager [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1918.323238] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31c313c-cec4-48ca-8cfe-3afc7b7f3a4c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.498997] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.499315] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.499545] env[62619]: DEBUG nova.objects.instance [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lazy-loading 'resources' on Instance uuid 8745aa7f-9848-4320-94b5-08b7e3bccf80 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1918.515085] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778569, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.706273] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.706894] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.801217] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778575, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.834413] env[62619]: INFO nova.compute.manager [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Shelve offloading [ 1919.015619] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778569, 'name': CloneVM_Task, 'duration_secs': 2.272987} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.015991] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Created linked-clone VM from snapshot [ 1919.016821] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7320cb6a-b567-487a-83a2-4e9b415d340e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.023787] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Uploading image d6107f81-5dc7-4bd9-877a-caaf1e8d2265 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1919.047101] env[62619]: DEBUG oslo_vmware.rw_handles [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1919.047101] env[62619]: value = "vm-369181" [ 1919.047101] env[62619]: _type = "VirtualMachine" [ 1919.047101] env[62619]: }. 
{{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1919.047355] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7503ef5b-ad21-408e-8c37-ecebeb966e79 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.055854] env[62619]: DEBUG oslo_vmware.rw_handles [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lease: (returnval){ [ 1919.055854] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5286cc96-96f4-c307-73b4-f798e716c4c0" [ 1919.055854] env[62619]: _type = "HttpNfcLease" [ 1919.055854] env[62619]: } obtained for exporting VM: (result){ [ 1919.055854] env[62619]: value = "vm-369181" [ 1919.055854] env[62619]: _type = "VirtualMachine" [ 1919.055854] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1919.056599] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the lease: (returnval){ [ 1919.056599] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5286cc96-96f4-c307-73b4-f798e716c4c0" [ 1919.056599] env[62619]: _type = "HttpNfcLease" [ 1919.056599] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1919.064252] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1919.064252] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5286cc96-96f4-c307-73b4-f798e716c4c0" [ 1919.064252] env[62619]: _type = "HttpNfcLease" [ 1919.064252] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1919.133273] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84799b01-77fa-415c-98ff-684bf11d790b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.140498] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb77db0-eaa0-4631-adc7-80bd9ee3d8df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.169565] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833b392c-fdce-44dd-a3ab-170ea5032208 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.176538] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c565b1-90b2-4815-b5f9-705ac6c6550b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.189109] env[62619]: DEBUG nova.compute.provider_tree [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1919.209880] env[62619]: DEBUG nova.compute.utils [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1919.215754] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.215971] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.216212] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.216472] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.216555] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.218249] env[62619]: INFO nova.compute.manager [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Terminating instance [ 1919.301263] env[62619]: DEBUG oslo_vmware.api [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778575, 'name': PowerOnVM_Task, 'duration_secs': 0.720975} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.301525] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1919.301721] env[62619]: INFO nova.compute.manager [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Took 9.36 seconds to spawn the instance on the hypervisor. 
[ 1919.301896] env[62619]: DEBUG nova.compute.manager [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1919.302645] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278ec128-5144-47ea-b07e-5ed1af9be5b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.338637] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1919.338995] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b29af00-ef56-42a8-9214-b32f68190608 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.345230] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1919.345230] env[62619]: value = "task-1778577" [ 1919.345230] env[62619]: _type = "Task" [ 1919.345230] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.352877] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.564876] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1919.564876] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5286cc96-96f4-c307-73b4-f798e716c4c0" [ 1919.564876] env[62619]: _type = "HttpNfcLease" [ 1919.564876] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1919.565417] env[62619]: DEBUG oslo_vmware.rw_handles [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1919.565417] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5286cc96-96f4-c307-73b4-f798e716c4c0" [ 1919.565417] env[62619]: _type = "HttpNfcLease" [ 1919.565417] env[62619]: }. 
{{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1919.565883] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370c903b-b7e3-477d-97fa-76a835d86768 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.572675] env[62619]: DEBUG oslo_vmware.rw_handles [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526bf408-57d9-7924-39d9-422e5f7c94db/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1919.572853] env[62619]: DEBUG oslo_vmware.rw_handles [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526bf408-57d9-7924-39d9-422e5f7c94db/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1919.681407] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-663d1101-36ff-434d-999e-bd636d773aab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.691823] env[62619]: DEBUG nova.scheduler.client.report [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1919.711894] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.721249] env[62619]: DEBUG nova.compute.manager [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1919.721395] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1919.722442] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5eec208-66f9-48ee-96de-2d40314e0d69 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.729867] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1919.730358] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f875a2c-d015-4651-b070-b7afb49e7779 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.736022] env[62619]: DEBUG oslo_vmware.api [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1919.736022] env[62619]: value = "task-1778578" [ 1919.736022] env[62619]: _type = "Task" [ 1919.736022] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.743772] env[62619]: DEBUG oslo_vmware.api [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778578, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.821117] env[62619]: INFO nova.compute.manager [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Took 14.15 seconds to build instance. 
[ 1919.857888] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1919.858198] env[62619]: DEBUG nova.compute.manager [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1919.860305] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4aba5a2-b3cc-4090-ab5a-7b80845ffc30 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.869576] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.869790] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.870025] env[62619]: DEBUG nova.network.neutron [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1920.198254] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.698s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.227522] env[62619]: INFO nova.scheduler.client.report [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Deleted allocations for instance 8745aa7f-9848-4320-94b5-08b7e3bccf80 [ 1920.249374] env[62619]: DEBUG oslo_vmware.api [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778578, 'name': PowerOffVM_Task, 'duration_secs': 0.188355} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.251428] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1920.251951] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1920.252474] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52bef56f-4b6f-4b75-8ce8-30cafa258dc2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.323789] env[62619]: DEBUG nova.compute.manager [req-2eef0275-6a62-4242-8890-9670e5ed7500 req-3773161c-9b75-4a06-ae63-8af46f0c547b service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Received event network-changed-35375282-c697-45eb-a87c-d85555a9012b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1920.324015] env[62619]: DEBUG nova.compute.manager [req-2eef0275-6a62-4242-8890-9670e5ed7500 req-3773161c-9b75-4a06-ae63-8af46f0c547b service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Refreshing instance network info cache due to event network-changed-35375282-c697-45eb-a87c-d85555a9012b. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1920.324314] env[62619]: DEBUG oslo_concurrency.lockutils [req-2eef0275-6a62-4242-8890-9670e5ed7500 req-3773161c-9b75-4a06-ae63-8af46f0c547b service nova] Acquiring lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1920.324686] env[62619]: DEBUG oslo_concurrency.lockutils [req-2eef0275-6a62-4242-8890-9670e5ed7500 req-3773161c-9b75-4a06-ae63-8af46f0c547b service nova] Acquired lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1920.324862] env[62619]: DEBUG nova.network.neutron [req-2eef0275-6a62-4242-8890-9670e5ed7500 req-3773161c-9b75-4a06-ae63-8af46f0c547b service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Refreshing network info cache for port 35375282-c697-45eb-a87c-d85555a9012b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1920.328324] env[62619]: DEBUG oslo_concurrency.lockutils [None req-399af842-1da6-494e-9e6c-7fa238073894 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.667s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.358708] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 
8ed2e4af-b484-4cd5-89c0-6ba60188127a] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1920.359884] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1920.359884] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleting the datastore file [datastore1] 8ed2e4af-b484-4cd5-89c0-6ba60188127a {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1920.359884] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fff4535a-bd22-42ba-924e-111169e54e69 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.366645] env[62619]: DEBUG oslo_vmware.api [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for the task: (returnval){ [ 1920.366645] env[62619]: value = "task-1778580" [ 1920.366645] env[62619]: _type = "Task" [ 1920.366645] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.377611] env[62619]: DEBUG oslo_vmware.api [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778580, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.735877] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4afe0d08-d9dd-4cbc-b3bf-10532e75c89e tempest-ServerRescueNegativeTestJSON-1542509938 tempest-ServerRescueNegativeTestJSON-1542509938-project-member] Lock "8745aa7f-9848-4320-94b5-08b7e3bccf80" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.660s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.800784] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.800784] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.800784] env[62619]: INFO nova.compute.manager [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Attaching volume 88d1f447-9b6e-467a-a32e-2d78a5468a11 to /dev/sdb [ 1920.802526] env[62619]: DEBUG nova.network.neutron [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Updating instance_info_cache with network_info: [{"id": "b78d1b6e-104b-4041-bcc5-5802f9f6fe3c", "address": "fa:16:3e:3b:fe:49", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb78d1b6e-10", "ovs_interfaceid": "b78d1b6e-104b-4041-bcc5-5802f9f6fe3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1920.838982] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1c655551-2943-46b4-a117-3e43147f0c8d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.847383] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4102b0d-266a-45bc-aae8-8306c177f824 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.865551] env[62619]: DEBUG nova.virt.block_device [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Updating existing volume attachment record: 5c6814bd-0f79-4ec1-8a6d-78d70eb3246c {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1920.876651] env[62619]: DEBUG oslo_vmware.api [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Task: {'id': task-1778580, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.258373} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.876918] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1920.877087] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1920.877266] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1920.877445] env[62619]: INFO nova.compute.manager [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1920.877758] env[62619]: DEBUG oslo.service.loopingcall [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1920.877972] env[62619]: DEBUG nova.compute.manager [-] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1920.878258] env[62619]: DEBUG nova.network.neutron [-] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1921.124972] env[62619]: DEBUG nova.network.neutron [req-2eef0275-6a62-4242-8890-9670e5ed7500 req-3773161c-9b75-4a06-ae63-8af46f0c547b service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updated VIF entry in instance network info cache for port 35375282-c697-45eb-a87c-d85555a9012b. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1921.125421] env[62619]: DEBUG nova.network.neutron [req-2eef0275-6a62-4242-8890-9670e5ed7500 req-3773161c-9b75-4a06-ae63-8af46f0c547b service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updating instance_info_cache with network_info: [{"id": "35375282-c697-45eb-a87c-d85555a9012b", "address": "fa:16:3e:87:9d:97", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35375282-c6", "ovs_interfaceid": "35375282-c697-45eb-a87c-d85555a9012b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1921.305864] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.363086] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.363413] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 
tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.628668] env[62619]: DEBUG oslo_concurrency.lockutils [req-2eef0275-6a62-4242-8890-9670e5ed7500 req-3773161c-9b75-4a06-ae63-8af46f0c547b service nova] Releasing lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.696086] env[62619]: DEBUG nova.network.neutron [-] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1921.869277] env[62619]: DEBUG nova.compute.manager [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1922.158612] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1922.159598] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac00f3d2-cb52-44d8-803a-488fa189165c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.167726] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1922.168198] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-117e4025-006d-485d-be49-f70d033a48fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.199221] env[62619]: INFO nova.compute.manager [-] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Took 1.32 seconds to deallocate network for instance. 
[ 1922.255069] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1922.255280] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1922.255351] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Deleting the datastore file [datastore1] a0953370-77f2-4e3b-a92e-cb12b3a82361 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1922.255634] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-712b713e-401f-4633-8067-01308df76840 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.262081] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1922.262081] env[62619]: value = "task-1778585" [ 1922.262081] env[62619]: _type = "Task" [ 1922.262081] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.270578] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778585, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.351263] env[62619]: DEBUG nova.compute.manager [req-a06e5ab7-4d84-44d0-885f-23264e19ce16 req-0d2fb19d-fc97-4b6b-8d3d-aecd46a0b345 service nova] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Received event network-vif-deleted-ac4a39b7-453e-42d2-93cf-fa1d7e2640ee {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1922.351472] env[62619]: DEBUG nova.compute.manager [req-a06e5ab7-4d84-44d0-885f-23264e19ce16 req-0d2fb19d-fc97-4b6b-8d3d-aecd46a0b345 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Received event network-vif-unplugged-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1922.351648] env[62619]: DEBUG oslo_concurrency.lockutils [req-a06e5ab7-4d84-44d0-885f-23264e19ce16 req-0d2fb19d-fc97-4b6b-8d3d-aecd46a0b345 service nova] Acquiring lock "a0953370-77f2-4e3b-a92e-cb12b3a82361-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.351855] env[62619]: DEBUG oslo_concurrency.lockutils [req-a06e5ab7-4d84-44d0-885f-23264e19ce16 req-0d2fb19d-fc97-4b6b-8d3d-aecd46a0b345 service nova] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.352074] env[62619]: DEBUG oslo_concurrency.lockutils [req-a06e5ab7-4d84-44d0-885f-23264e19ce16 req-0d2fb19d-fc97-4b6b-8d3d-aecd46a0b345 service nova] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.352260] env[62619]: DEBUG nova.compute.manager [req-a06e5ab7-4d84-44d0-885f-23264e19ce16 req-0d2fb19d-fc97-4b6b-8d3d-aecd46a0b345 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] No waiting events found dispatching network-vif-unplugged-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1922.352485] env[62619]: WARNING nova.compute.manager [req-a06e5ab7-4d84-44d0-885f-23264e19ce16 req-0d2fb19d-fc97-4b6b-8d3d-aecd46a0b345 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Received unexpected event network-vif-unplugged-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c for instance with vm_state shelved and task_state shelving_offloading. [ 1922.352585] env[62619]: DEBUG nova.compute.manager [req-a06e5ab7-4d84-44d0-885f-23264e19ce16 req-0d2fb19d-fc97-4b6b-8d3d-aecd46a0b345 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Received event network-changed-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1922.352733] env[62619]: DEBUG nova.compute.manager [req-a06e5ab7-4d84-44d0-885f-23264e19ce16 req-0d2fb19d-fc97-4b6b-8d3d-aecd46a0b345 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Refreshing instance network info cache due to event network-changed-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1922.352941] env[62619]: DEBUG oslo_concurrency.lockutils [req-a06e5ab7-4d84-44d0-885f-23264e19ce16 req-0d2fb19d-fc97-4b6b-8d3d-aecd46a0b345 service nova] Acquiring lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1922.353065] env[62619]: DEBUG oslo_concurrency.lockutils [req-a06e5ab7-4d84-44d0-885f-23264e19ce16 req-0d2fb19d-fc97-4b6b-8d3d-aecd46a0b345 service nova] Acquired lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1922.353221] env[62619]: DEBUG nova.network.neutron [req-a06e5ab7-4d84-44d0-885f-23264e19ce16 req-0d2fb19d-fc97-4b6b-8d3d-aecd46a0b345 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Refreshing network info cache for port b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1922.397129] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.397450] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.399745] env[62619]: INFO nova.compute.claims [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1922.713385] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.774269] env[62619]: DEBUG oslo_vmware.api [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778585, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203497} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.774544] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1922.774722] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1922.774885] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1922.797827] env[62619]: INFO nova.scheduler.client.report [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Deleted allocations for instance a0953370-77f2-4e3b-a92e-cb12b3a82361 [ 1923.239632] env[62619]: DEBUG nova.network.neutron [req-a06e5ab7-4d84-44d0-885f-23264e19ce16 req-0d2fb19d-fc97-4b6b-8d3d-aecd46a0b345 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Updated VIF entry in instance network info cache for port b78d1b6e-104b-4041-bcc5-5802f9f6fe3c. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1923.240064] env[62619]: DEBUG nova.network.neutron [req-a06e5ab7-4d84-44d0-885f-23264e19ce16 req-0d2fb19d-fc97-4b6b-8d3d-aecd46a0b345 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Updating instance_info_cache with network_info: [{"id": "b78d1b6e-104b-4041-bcc5-5802f9f6fe3c", "address": "fa:16:3e:3b:fe:49", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": null, "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapb78d1b6e-10", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1923.302289] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.543757] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dbdbbfc-f96d-489e-95fd-2f002f01c736 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.551406] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac2c190-7e33-498f-8906-984126f37deb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.581463] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43aa174-77fd-4d51-9aaa-d5c15912fed9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.589476] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7da8260-ae91-45b5-979c-706d40b54136 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.603190] env[62619]: DEBUG nova.compute.provider_tree [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1923.743709] env[62619]: DEBUG oslo_concurrency.lockutils [req-a06e5ab7-4d84-44d0-885f-23264e19ce16 req-0d2fb19d-fc97-4b6b-8d3d-aecd46a0b345 service nova] Releasing lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1924.124498] env[62619]: ERROR nova.scheduler.client.report [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [req-b15b748c-eeed-482d-b3cb-72cb8c074f29] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b15b748c-eeed-482d-b3cb-72cb8c074f29"}]} [ 1924.141762] env[62619]: DEBUG nova.scheduler.client.report [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1924.156553] env[62619]: DEBUG nova.scheduler.client.report [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1924.156784] env[62619]: DEBUG nova.compute.provider_tree [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1924.169952] env[62619]: DEBUG nova.scheduler.client.report [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1924.188022] env[62619]: DEBUG nova.scheduler.client.report [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1924.320551] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fef54d0-98eb-41ee-91d0-e9c5092f948d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.328458] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fd90f63f-5117-4947-918a-e4b399b5b2e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.358459] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb83684-57e3-4536-8a27-7314bb3aed67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.365899] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6976ccf-23f9-400f-904e-1f5d03c58e41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.379173] env[62619]: DEBUG nova.compute.provider_tree [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1924.416034] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "a0953370-77f2-4e3b-a92e-cb12b3a82361" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1924.914951] env[62619]: DEBUG nova.scheduler.client.report [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 168 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1924.915274] env[62619]: DEBUG nova.compute.provider_tree [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 168 to 169 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1924.915458] env[62619]: DEBUG nova.compute.provider_tree [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1925.416071] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Volume attach. Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1925.416364] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369183', 'volume_id': '88d1f447-9b6e-467a-a32e-2d78a5468a11', 'name': 'volume-88d1f447-9b6e-467a-a32e-2d78a5468a11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cec0ea75-042d-4ee5-91d5-cad86456ab97', 'attached_at': '', 'detached_at': '', 'volume_id': '88d1f447-9b6e-467a-a32e-2d78a5468a11', 'serial': '88d1f447-9b6e-467a-a32e-2d78a5468a11'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1925.417313] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08d2c92-bee0-462d-b386-c3008ecc4dd8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.422318] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.025s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.422824] env[62619]: DEBUG nova.compute.manager [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1925.438603] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.725s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.438819] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.440899] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.139s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.441127] env[62619]: DEBUG nova.objects.instance [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'resources' on Instance uuid a0953370-77f2-4e3b-a92e-cb12b3a82361 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1925.443602] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b313a10d-4fe4-4634-801d-38406d8b4ada {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.469994] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] volume-88d1f447-9b6e-467a-a32e-2d78a5468a11/volume-88d1f447-9b6e-467a-a32e-2d78a5468a11.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1925.471194] env[62619]: INFO nova.scheduler.client.report [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Deleted allocations for instance 8ed2e4af-b484-4cd5-89c0-6ba60188127a [ 1925.472428] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75b9bd94-af6a-49c1-bf24-c83934136f21 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.494161] env[62619]: DEBUG oslo_vmware.api [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1925.494161] env[62619]: value = "task-1778587" [ 1925.494161] env[62619]: _type = "Task" [ 1925.494161] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.503201] env[62619]: DEBUG oslo_vmware.api [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778587, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.945116] env[62619]: DEBUG nova.compute.utils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1925.946577] env[62619]: DEBUG nova.compute.manager [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1925.946766] env[62619]: DEBUG nova.network.neutron [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1925.949271] env[62619]: DEBUG nova.objects.instance [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'numa_topology' on Instance uuid a0953370-77f2-4e3b-a92e-cb12b3a82361 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1925.993825] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a3fc5b4-ab94-48a3-ba48-497d8f6c8412 tempest-DeleteServersTestJSON-378247299 tempest-DeleteServersTestJSON-378247299-project-member] Lock "8ed2e4af-b484-4cd5-89c0-6ba60188127a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.778s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.999904] env[62619]: DEBUG nova.policy [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8d937f303584c3daea133a6283fd5a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23d77e73a09d492695fbfe6ac2c93371', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1926.006737] env[62619]: DEBUG oslo_vmware.api [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778587, 'name': ReconfigVM_Task, 'duration_secs': 0.496587} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.007124] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Reconfigured VM instance instance-0000006e to attach disk [datastore1] volume-88d1f447-9b6e-467a-a32e-2d78a5468a11/volume-88d1f447-9b6e-467a-a32e-2d78a5468a11.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1926.012211] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51c99a0e-2986-450e-b9d3-4a110dd1febc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.028410] env[62619]: DEBUG oslo_vmware.api [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1926.028410] env[62619]: value = "task-1778588" [ 1926.028410] env[62619]: _type = "Task" [ 1926.028410] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.038881] env[62619]: DEBUG oslo_vmware.api [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778588, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.276384] env[62619]: DEBUG nova.network.neutron [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Successfully created port: 56f48ead-6f6e-451e-af3c-2634f9797c5e {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1926.454020] env[62619]: DEBUG nova.compute.manager [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1926.455708] env[62619]: DEBUG nova.objects.base [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1926.547432] env[62619]: DEBUG oslo_vmware.api [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778588, 'name': ReconfigVM_Task, 'duration_secs': 0.210615} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.548526] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369183', 'volume_id': '88d1f447-9b6e-467a-a32e-2d78a5468a11', 'name': 'volume-88d1f447-9b6e-467a-a32e-2d78a5468a11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cec0ea75-042d-4ee5-91d5-cad86456ab97', 'attached_at': '', 'detached_at': '', 'volume_id': '88d1f447-9b6e-467a-a32e-2d78a5468a11', 'serial': '88d1f447-9b6e-467a-a32e-2d78a5468a11'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1926.613104] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b76807-b2f5-4805-bcfc-300acba2d08d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.622092] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64b35aa-a9ea-41c9-bcee-f5c9110ca18e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.655625] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0dce6a8-5035-4eb2-9268-67cb54ad23f7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.663660] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c516da-7d43-493b-be83-743f2333341b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.679648] env[62619]: DEBUG nova.compute.provider_tree [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1927.183756] env[62619]: DEBUG nova.scheduler.client.report [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1927.222064] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "4c66bbdf-af6a-4705-8219-85cf19f8314e" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.222064] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.222064] env[62619]: INFO nova.compute.manager [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Rebooting instance [ 1927.255236] env[62619]: DEBUG oslo_vmware.rw_handles [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526bf408-57d9-7924-39d9-422e5f7c94db/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1927.256210] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d94961-0194-4be1-81c2-443b4a6349fc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.263032] env[62619]: DEBUG oslo_vmware.rw_handles [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526bf408-57d9-7924-39d9-422e5f7c94db/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1927.263032] env[62619]: ERROR oslo_vmware.rw_handles [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526bf408-57d9-7924-39d9-422e5f7c94db/disk-0.vmdk due to incomplete transfer. [ 1927.263252] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-49e86e7b-3fc6-4f35-a12f-c833bb1b3628 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.270476] env[62619]: DEBUG oslo_vmware.rw_handles [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526bf408-57d9-7924-39d9-422e5f7c94db/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1927.270713] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Uploaded image d6107f81-5dc7-4bd9-877a-caaf1e8d2265 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1927.273196] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1927.273466] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0d2cba75-708a-4f05-9f15-affc0aba9892 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.279954] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1927.279954] env[62619]: value = "task-1778590" [ 1927.279954] env[62619]: _type = "Task" [ 1927.279954] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.288198] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778590, 'name': Destroy_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.462050] env[62619]: DEBUG nova.compute.manager [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1927.485828] env[62619]: DEBUG nova.virt.hardware [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1927.486137] env[62619]: DEBUG nova.virt.hardware [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1927.486338] env[62619]: DEBUG nova.virt.hardware [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1927.486547] env[62619]: DEBUG nova.virt.hardware [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1927.486710] env[62619]: DEBUG nova.virt.hardware [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1927.486879] env[62619]: DEBUG nova.virt.hardware [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1927.487164] env[62619]: DEBUG nova.virt.hardware [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1927.487375] env[62619]: DEBUG nova.virt.hardware [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1927.487565] env[62619]: DEBUG 
nova.virt.hardware [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1927.487752] env[62619]: DEBUG nova.virt.hardware [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1927.487949] env[62619]: DEBUG nova.virt.hardware [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1927.488845] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-876c5ccc-f60a-4ede-9418-640d0203c62c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.497173] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3647e0ed-8020-4809-b19a-77a4754a60e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.587704] env[62619]: DEBUG nova.objects.instance [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lazy-loading 'flavor' on Instance uuid cec0ea75-042d-4ee5-91d5-cad86456ab97 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1927.688909] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.248s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.726078] env[62619]: DEBUG nova.compute.manager [req-5e12ba9d-7c4f-40f7-87fd-0fad2509d6e3 req-ddd94171-2c00-4d21-9e16-c5bf70726f38 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Received event network-vif-plugged-56f48ead-6f6e-451e-af3c-2634f9797c5e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1927.726078] env[62619]: DEBUG oslo_concurrency.lockutils [req-5e12ba9d-7c4f-40f7-87fd-0fad2509d6e3 req-ddd94171-2c00-4d21-9e16-c5bf70726f38 service nova] Acquiring lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.726078] env[62619]: DEBUG oslo_concurrency.lockutils [req-5e12ba9d-7c4f-40f7-87fd-0fad2509d6e3 req-ddd94171-2c00-4d21-9e16-c5bf70726f38 service nova] Lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.726078] env[62619]: DEBUG oslo_concurrency.lockutils 
[req-5e12ba9d-7c4f-40f7-87fd-0fad2509d6e3 req-ddd94171-2c00-4d21-9e16-c5bf70726f38 service nova] Lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.726078] env[62619]: DEBUG nova.compute.manager [req-5e12ba9d-7c4f-40f7-87fd-0fad2509d6e3 req-ddd94171-2c00-4d21-9e16-c5bf70726f38 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] No waiting events found dispatching network-vif-plugged-56f48ead-6f6e-451e-af3c-2634f9797c5e {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1927.726078] env[62619]: WARNING nova.compute.manager [req-5e12ba9d-7c4f-40f7-87fd-0fad2509d6e3 req-ddd94171-2c00-4d21-9e16-c5bf70726f38 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Received unexpected event network-vif-plugged-56f48ead-6f6e-451e-af3c-2634f9797c5e for instance with vm_state building and task_state spawning. [ 1927.744478] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.744663] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1927.744841] env[62619]: DEBUG nova.network.neutron [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1927.790997] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778590, 'name': Destroy_Task} progress is 33%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.813651] env[62619]: DEBUG nova.network.neutron [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Successfully updated port: 56f48ead-6f6e-451e-af3c-2634f9797c5e {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1928.093407] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e8a12e8a-4013-4812-b251-3b25c1b8f30f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.294s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.197725] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9914aae3-5093-4166-a6c4-f4f10bd9515f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1928.198128] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9914aae3-5093-4166-a6c4-f4f10bd9515f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1928.198425] env[62619]: DEBUG nova.compute.manager [None req-9914aae3-5093-4166-a6c4-f4f10bd9515f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1928.201994] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83045e3-1b58-4ff4-9926-484e703266a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.205142] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8a8ffa2-5d76-45fc-be21-99f03cb3ada3 tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 24.380s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.206090] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 3.790s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1928.206285] env[62619]: INFO nova.compute.manager [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 
tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Unshelving [ 1928.211981] env[62619]: DEBUG nova.compute.manager [None req-9914aae3-5093-4166-a6c4-f4f10bd9515f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1928.212557] env[62619]: DEBUG nova.objects.instance [None req-9914aae3-5093-4166-a6c4-f4f10bd9515f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lazy-loading 'flavor' on Instance uuid cec0ea75-042d-4ee5-91d5-cad86456ab97 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1928.289475] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778590, 'name': Destroy_Task, 'duration_secs': 0.984711} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.289756] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Destroyed the VM [ 1928.290013] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1928.290315] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bf983d79-3f67-4a07-91fb-e39c0adbb9e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.296276] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1928.296276] env[62619]: value = "task-1778591" [ 1928.296276] env[62619]: _type = "Task" [ 1928.296276] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.304042] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778591, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.317934] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1928.318112] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1928.318236] env[62619]: DEBUG nova.network.neutron [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1928.491065] env[62619]: DEBUG nova.network.neutron [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance_info_cache with network_info: [{"id": "b1a6212d-63f4-4343-9100-d88707a89c10", "address": "fa:16:3e:48:b2:0f", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a6212d-63", "ovs_interfaceid": "b1a6212d-63f4-4343-9100-d88707a89c10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.806468] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778591, 'name': RemoveSnapshot_Task, 'duration_secs': 0.32408} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.806739] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1928.807022] env[62619]: DEBUG nova.compute.manager [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1928.807797] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0ae47e-417a-4aa4-8d44-0441a0cd32f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.854623] env[62619]: DEBUG nova.network.neutron [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1928.990280] env[62619]: DEBUG nova.network.neutron [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Updating instance_info_cache with network_info: [{"id": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "address": "fa:16:3e:8b:e2:fc", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f48ead-6f", "ovs_interfaceid": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.994529] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1929.220098] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9914aae3-5093-4166-a6c4-f4f10bd9515f 
tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1929.220429] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a685666-2f8b-4b29-a5bb-71df76cfcb5e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.228512] env[62619]: DEBUG oslo_vmware.api [None req-9914aae3-5093-4166-a6c4-f4f10bd9515f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1929.228512] env[62619]: value = "task-1778592" [ 1929.228512] env[62619]: _type = "Task" [ 1929.228512] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.232931] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.233213] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.233439] env[62619]: DEBUG nova.objects.instance [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'pci_requests' on Instance uuid a0953370-77f2-4e3b-a92e-cb12b3a82361 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1929.239726] env[62619]: DEBUG oslo_vmware.api [None req-9914aae3-5093-4166-a6c4-f4f10bd9515f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778592, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.321596] env[62619]: INFO nova.compute.manager [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Shelve offloading [ 1929.493363] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1929.493703] env[62619]: DEBUG nova.compute.manager [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Instance network_info: |[{"id": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "address": "fa:16:3e:8b:e2:fc", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f48ead-6f", "ovs_interfaceid": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1929.494143] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:e2:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9aa05ef8-c7bb-4af5-983f-bfa0f3f88223', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56f48ead-6f6e-451e-af3c-2634f9797c5e', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1929.502014] env[62619]: DEBUG oslo.service.loopingcall [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1929.503804] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1929.504068] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46732bc7-534e-40c7-9d0e-876ba8be3831 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.518965] env[62619]: DEBUG nova.compute.manager [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1929.519781] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655bf2f4-6c4f-4e1d-973c-a1e2f8c19793 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.527247] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1929.527247] env[62619]: value = "task-1778593" [ 1929.527247] env[62619]: _type = "Task" [ 1929.527247] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.537598] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778593, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.740169] env[62619]: DEBUG nova.objects.instance [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'numa_topology' on Instance uuid a0953370-77f2-4e3b-a92e-cb12b3a82361 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1929.741342] env[62619]: DEBUG oslo_vmware.api [None req-9914aae3-5093-4166-a6c4-f4f10bd9515f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778592, 'name': PowerOffVM_Task, 'duration_secs': 0.168776} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.741893] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9914aae3-5093-4166-a6c4-f4f10bd9515f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1929.742129] env[62619]: DEBUG nova.compute.manager [None req-9914aae3-5093-4166-a6c4-f4f10bd9515f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1929.743016] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47505aa-f5d6-4b0a-aa75-ff917393712b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.753668] env[62619]: DEBUG nova.compute.manager [req-c8200307-697f-458a-b2b1-5e52fb6f6202 req-6fc0b103-92ca-41d6-ab3a-92ece6147a85 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Received event network-changed-56f48ead-6f6e-451e-af3c-2634f9797c5e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1929.753870] env[62619]: DEBUG nova.compute.manager [req-c8200307-697f-458a-b2b1-5e52fb6f6202 req-6fc0b103-92ca-41d6-ab3a-92ece6147a85 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Refreshing instance network info cache due to event network-changed-56f48ead-6f6e-451e-af3c-2634f9797c5e. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1929.754097] env[62619]: DEBUG oslo_concurrency.lockutils [req-c8200307-697f-458a-b2b1-5e52fb6f6202 req-6fc0b103-92ca-41d6-ab3a-92ece6147a85 service nova] Acquiring lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1929.754242] env[62619]: DEBUG oslo_concurrency.lockutils [req-c8200307-697f-458a-b2b1-5e52fb6f6202 req-6fc0b103-92ca-41d6-ab3a-92ece6147a85 service nova] Acquired lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1929.754404] env[62619]: DEBUG nova.network.neutron [req-c8200307-697f-458a-b2b1-5e52fb6f6202 req-6fc0b103-92ca-41d6-ab3a-92ece6147a85 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Refreshing network info cache for port 56f48ead-6f6e-451e-af3c-2634f9797c5e {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1929.825531] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1929.825837] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc6df1b5-9b76-4ee7-9585-630027423b66 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.832596] env[62619]: DEBUG oslo_vmware.api [None 
req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1929.832596] env[62619]: value = "task-1778594" [ 1929.832596] env[62619]: _type = "Task" [ 1929.832596] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.841876] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778594, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.040770] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778593, 'name': CreateVM_Task, 'duration_secs': 0.366989} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.041151] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1930.041838] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.041999] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.042328] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1930.042572] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7fd4d8f-1202-4e04-9a4a-2f37e0e7e105 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.047281] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1930.047281] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521a6d43-1989-fe3f-e61d-78917c005487" [ 1930.047281] env[62619]: _type = "Task" [ 1930.047281] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.055159] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521a6d43-1989-fe3f-e61d-78917c005487, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.242414] env[62619]: INFO nova.compute.claims [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1930.260233] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9914aae3-5093-4166-a6c4-f4f10bd9515f tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.062s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.345962] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1930.346066] env[62619]: DEBUG nova.compute.manager [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1930.346813] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1d763c-eb35-4036-a690-db4e2dc57ee1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.352829] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.352988] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.353173] env[62619]: DEBUG nova.network.neutron [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1930.495303] env[62619]: DEBUG nova.network.neutron [req-c8200307-697f-458a-b2b1-5e52fb6f6202 
req-6fc0b103-92ca-41d6-ab3a-92ece6147a85 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Updated VIF entry in instance network info cache for port 56f48ead-6f6e-451e-af3c-2634f9797c5e. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1930.495679] env[62619]: DEBUG nova.network.neutron [req-c8200307-697f-458a-b2b1-5e52fb6f6202 req-6fc0b103-92ca-41d6-ab3a-92ece6147a85 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Updating instance_info_cache with network_info: [{"id": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "address": "fa:16:3e:8b:e2:fc", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f48ead-6f", "ovs_interfaceid": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1930.543441] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f354ce-a19b-44d8-b278-77145e29c3f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.552883] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Doing hard reboot of VM {{(pid=62619) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1930.553487] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-5a7a9d16-fcf4-4b30-9560-7e1ab1f047bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.558487] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521a6d43-1989-fe3f-e61d-78917c005487, 'name': SearchDatastore_Task, 'duration_secs': 0.012544} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.558793] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1930.559038] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1930.559401] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.559469] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.559622] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1930.559855] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6eaa7555-d01f-44bd-9ee2-98cf13e48f43 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.562905] env[62619]: DEBUG oslo_vmware.api [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1930.562905] env[62619]: value = "task-1778595" [ 1930.562905] env[62619]: _type = "Task" [ 1930.562905] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.568042] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1930.568228] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1930.571485] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b5bf27f-32d6-47a1-9179-9f8aaa6b3fb3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.573554] env[62619]: DEBUG oslo_vmware.api [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778595, 'name': ResetVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.576429] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1930.576429] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524422d8-3c86-fe8f-9724-64f036a7c449" [ 1930.576429] env[62619]: _type = "Task" [ 1930.576429] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.583632] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524422d8-3c86-fe8f-9724-64f036a7c449, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.620754] env[62619]: DEBUG nova.objects.instance [None req-a14d90fd-8a4b-4198-98b6-9951b195a32e tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lazy-loading 'flavor' on Instance uuid cec0ea75-042d-4ee5-91d5-cad86456ab97 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1930.998779] env[62619]: DEBUG oslo_concurrency.lockutils [req-c8200307-697f-458a-b2b1-5e52fb6f6202 req-6fc0b103-92ca-41d6-ab3a-92ece6147a85 service nova] Releasing lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.073300] env[62619]: DEBUG oslo_vmware.api [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778595, 'name': ResetVM_Task, 'duration_secs': 0.091459} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.073587] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Did hard reboot of VM {{(pid=62619) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1931.076972] env[62619]: DEBUG nova.compute.manager [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1931.076972] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e065e42-2f9b-44cc-8763-b00c9283076c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.089594] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524422d8-3c86-fe8f-9724-64f036a7c449, 'name': SearchDatastore_Task, 'duration_secs': 0.014959} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.090557] env[62619]: DEBUG nova.network.neutron [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Updating instance_info_cache with network_info: [{"id": "b52c0c61-cdaa-4ec8-b935-3229b930c548", "address": "fa:16:3e:58:c2:30", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb52c0c61-cd", "ovs_interfaceid": "b52c0c61-cdaa-4ec8-b935-3229b930c548", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.092237] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebd045f4-1523-4d50-a5d8-e76264ba0e24 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.098889] env[62619]: DEBUG oslo_vmware.api [None 
req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1931.098889] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5238597e-fb5f-578d-029e-c404b4e89d02" [ 1931.098889] env[62619]: _type = "Task" [ 1931.098889] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.107339] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5238597e-fb5f-578d-029e-c404b4e89d02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.129151] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a14d90fd-8a4b-4198-98b6-9951b195a32e tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "refresh_cache-cec0ea75-042d-4ee5-91d5-cad86456ab97" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1931.129382] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a14d90fd-8a4b-4198-98b6-9951b195a32e tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquired lock "refresh_cache-cec0ea75-042d-4ee5-91d5-cad86456ab97" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1931.130854] env[62619]: DEBUG nova.network.neutron [None req-a14d90fd-8a4b-4198-98b6-9951b195a32e tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1931.130854] env[62619]: DEBUG nova.objects.instance [None req-a14d90fd-8a4b-4198-98b6-9951b195a32e tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lazy-loading 'info_cache' on Instance uuid cec0ea75-042d-4ee5-91d5-cad86456ab97 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1931.401927] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4539ef02-e1c6-45c5-a5cb-cb3f9f81fc00 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.409926] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227481fa-ec88-48e9-af33-c63cdf645d14 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.442961] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba946925-c03f-4e3c-8ce9-8731924eb67a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.451235] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9bf211-fc2a-4d00-b39d-4dfc62b569c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.468109] env[62619]: DEBUG nova.compute.provider_tree [None 
req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1931.518083] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "21d9fc7a-228e-4b33-8534-55285d4e6e96" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.518083] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "21d9fc7a-228e-4b33-8534-55285d4e6e96" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.518463] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "21d9fc7a-228e-4b33-8534-55285d4e6e96-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.518463] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "21d9fc7a-228e-4b33-8534-55285d4e6e96-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.518599] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "21d9fc7a-228e-4b33-8534-55285d4e6e96-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.521192] env[62619]: INFO nova.compute.manager [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Terminating instance [ 1931.595295] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.597346] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c133a6da-8cdb-49ee-8853-1c665b644d03 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e" 
"released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.376s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.609489] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5238597e-fb5f-578d-029e-c404b4e89d02, 'name': SearchDatastore_Task, 'duration_secs': 0.010075} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.610243] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.610495] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 9e69e4ab-f219-4ba8-8f66-6063d8ab242a/9e69e4ab-f219-4ba8-8f66-6063d8ab242a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1931.610744] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70bdb44a-1035-47e3-a2ec-7a6b2ffcf05e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.618617] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1931.618617] env[62619]: value = "task-1778596" [ 1931.618617] env[62619]: _type = "Task" [ 1931.618617] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.626599] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778596, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.633274] env[62619]: DEBUG nova.objects.base [None req-a14d90fd-8a4b-4198-98b6-9951b195a32e tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1931.973633] env[62619]: DEBUG nova.scheduler.client.report [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1932.025933] env[62619]: DEBUG nova.compute.manager [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1932.026265] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1932.026567] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e800e68-b51c-4a55-8fa3-6d8431b61fd0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.031441] env[62619]: DEBUG nova.compute.manager [req-c6af4a0c-1c67-4490-a9c1-219821908418 req-0666f597-2f92-433d-ad3f-c9e0593b854d service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Received event network-vif-unplugged-b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1932.031441] env[62619]: DEBUG oslo_concurrency.lockutils [req-c6af4a0c-1c67-4490-a9c1-219821908418 req-0666f597-2f92-433d-ad3f-c9e0593b854d service nova] Acquiring lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1932.031913] env[62619]: DEBUG oslo_concurrency.lockutils [req-c6af4a0c-1c67-4490-a9c1-219821908418 req-0666f597-2f92-433d-ad3f-c9e0593b854d service nova] Lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1932.032064] env[62619]: DEBUG oslo_concurrency.lockutils [req-c6af4a0c-1c67-4490-a9c1-219821908418 req-0666f597-2f92-433d-ad3f-c9e0593b854d service nova] Lock 
"13eeb4aa-0f20-4aed-9453-66afb0ff1152-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1932.032248] env[62619]: DEBUG nova.compute.manager [req-c6af4a0c-1c67-4490-a9c1-219821908418 req-0666f597-2f92-433d-ad3f-c9e0593b854d service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] No waiting events found dispatching network-vif-unplugged-b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1932.032524] env[62619]: WARNING nova.compute.manager [req-c6af4a0c-1c67-4490-a9c1-219821908418 req-0666f597-2f92-433d-ad3f-c9e0593b854d service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Received unexpected event network-vif-unplugged-b52c0c61-cdaa-4ec8-b935-3229b930c548 for instance with vm_state shelved and task_state shelving_offloading. [ 1932.042674] env[62619]: DEBUG oslo_vmware.api [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1932.042674] env[62619]: value = "task-1778597" [ 1932.042674] env[62619]: _type = "Task" [ 1932.042674] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.058595] env[62619]: DEBUG oslo_vmware.api [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778597, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.129372] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778596, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472245} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.129665] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 9e69e4ab-f219-4ba8-8f66-6063d8ab242a/9e69e4ab-f219-4ba8-8f66-6063d8ab242a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1932.129889] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1932.130334] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-485b7b26-2637-4424-b0a2-47425262dedd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.135420] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1932.138050] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07a5cd4-60df-409e-aaf1-328674a8d191 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.142073] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1932.142073] env[62619]: value = "task-1778598" [ 1932.142073] env[62619]: _type = "Task" [ 1932.142073] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.149326] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1932.150212] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a25506b1-de6e-4405-9e86-1094b48e8b08 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.159393] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778598, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.243166] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1932.243431] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1932.243628] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleting the datastore file [datastore1] 13eeb4aa-0f20-4aed-9453-66afb0ff1152 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1932.243935] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae1b3e98-9f10-49d7-9a81-84c84170a937 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.252435] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1932.252435] env[62619]: value = "task-1778600" [ 1932.252435] env[62619]: _type = "Task" [ 1932.252435] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.261990] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778600, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.479539] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.246s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1932.485501] env[62619]: DEBUG nova.network.neutron [None req-a14d90fd-8a4b-4198-98b6-9951b195a32e tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Updating instance_info_cache with network_info: [{"id": "0de99671-66a1-4b86-9417-2955fdf1dcba", "address": "fa:16:3e:fc:2a:8e", "network": {"id": "6a1847de-b585-445d-8064-dc33dc365719", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1852054191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4010737526cd4a3aa36f15a187051010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0de99671-66", "ovs_interfaceid": "0de99671-66a1-4b86-9417-2955fdf1dcba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1932.536431] env[62619]: INFO nova.network.neutron [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Updating port b78d1b6e-104b-4041-bcc5-5802f9f6fe3c with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1932.554778] env[62619]: DEBUG oslo_vmware.api [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778597, 'name': PowerOffVM_Task, 'duration_secs': 0.215224} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.554778] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1932.554778] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Volume detach. Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1932.554778] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369165', 'volume_id': 'd3e2944a-ae36-4f80-82db-68a3de2d143e', 'name': 'volume-d3e2944a-ae36-4f80-82db-68a3de2d143e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '21d9fc7a-228e-4b33-8534-55285d4e6e96', 'attached_at': '2024-12-11T22:57:55.000000', 'detached_at': '', 'volume_id': 'd3e2944a-ae36-4f80-82db-68a3de2d143e', 'serial': 'd3e2944a-ae36-4f80-82db-68a3de2d143e'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1932.555609] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d54573-efb6-40f5-b9d6-c71bb044a91f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.575288] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89fecb7-08c9-41a0-b45d-e61517881179 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.581984] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f307a2-d00d-4261-8f31-dc256150102f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.599872] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2486b133-2a69-4894-ab33-85497b92cfbb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.615089] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] The volume has not been displaced from its original location: [datastore1] volume-d3e2944a-ae36-4f80-82db-68a3de2d143e/volume-d3e2944a-ae36-4f80-82db-68a3de2d143e.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1932.625723] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Reconfiguring VM instance instance-0000006d to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1932.626248] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4a6c122-e097-4a70-b1b5-f44ca3fd5e8f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.648739] env[62619]: DEBUG oslo_vmware.api [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1932.648739] env[62619]: value = "task-1778601" [ 1932.648739] env[62619]: _type = "Task" [ 1932.648739] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.654666] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778598, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069431} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.655333] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1932.656128] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86aeb164-d103-4017-9c64-406ec4b99243 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.661780] env[62619]: DEBUG oslo_vmware.api [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778601, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.682345] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 9e69e4ab-f219-4ba8-8f66-6063d8ab242a/9e69e4ab-f219-4ba8-8f66-6063d8ab242a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1932.682713] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f83aff8-3c90-4572-8987-c22e55b6bd7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.702198] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1932.702198] env[62619]: value = "task-1778602" [ 1932.702198] env[62619]: _type = "Task" [ 1932.702198] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.710448] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778602, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.762673] env[62619]: DEBUG oslo_vmware.api [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778600, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147587} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.762934] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1932.763133] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1932.763309] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1932.789907] env[62619]: INFO nova.scheduler.client.report [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleted allocations for instance 13eeb4aa-0f20-4aed-9453-66afb0ff1152 [ 1932.988923] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a14d90fd-8a4b-4198-98b6-9951b195a32e tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Releasing lock "refresh_cache-cec0ea75-042d-4ee5-91d5-cad86456ab97" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.158749] env[62619]: DEBUG oslo_vmware.api [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778601, 'name': ReconfigVM_Task, 'duration_secs': 0.379254} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.159039] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Reconfigured VM instance instance-0000006d to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1933.163891] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25f9922d-71c2-4760-af74-807fc0b6c769 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.178366] env[62619]: DEBUG oslo_vmware.api [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1933.178366] env[62619]: value = "task-1778603" [ 1933.178366] env[62619]: _type = "Task" [ 1933.178366] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.186263] env[62619]: DEBUG oslo_vmware.api [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778603, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.211717] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778602, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.294613] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.294850] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.295083] env[62619]: DEBUG nova.objects.instance [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lazy-loading 'resources' on Instance uuid 13eeb4aa-0f20-4aed-9453-66afb0ff1152 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1933.470067] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquiring lock "314758ce-6522-47cf-8445-0d28b1b085b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.470365] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Lock "314758ce-6522-47cf-8445-0d28b1b085b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.688234] env[62619]: DEBUG oslo_vmware.api [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778603, 'name': ReconfigVM_Task, 'duration_secs': 0.355259} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.688532] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369165', 'volume_id': 'd3e2944a-ae36-4f80-82db-68a3de2d143e', 'name': 'volume-d3e2944a-ae36-4f80-82db-68a3de2d143e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '21d9fc7a-228e-4b33-8534-55285d4e6e96', 'attached_at': '2024-12-11T22:57:55.000000', 'detached_at': '', 'volume_id': 'd3e2944a-ae36-4f80-82db-68a3de2d143e', 'serial': 'd3e2944a-ae36-4f80-82db-68a3de2d143e'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1933.688800] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1933.689594] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1c3065-0bd8-44de-91dd-77ec0d85d1a0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.695915] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1933.696143] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b774943d-70ac-4012-9155-2d77344900dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.710335] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778602, 'name': ReconfigVM_Task, 'duration_secs': 0.88774} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.710595] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 9e69e4ab-f219-4ba8-8f66-6063d8ab242a/9e69e4ab-f219-4ba8-8f66-6063d8ab242a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1933.711177] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-926418f9-0dc7-464d-ab03-80cdccd79bb2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.716181] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1933.716181] env[62619]: value = "task-1778605" [ 1933.716181] env[62619]: _type = "Task" [ 1933.716181] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.723844] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778605, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.766638] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1933.766861] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1933.767180] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleting the datastore file [datastore1] 21d9fc7a-228e-4b33-8534-55285d4e6e96 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1933.767528] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4662d5a3-9d80-4b56-9209-0f76a43b24d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.774066] env[62619]: DEBUG oslo_vmware.api [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1933.774066] env[62619]: value = "task-1778606" [ 1933.774066] env[62619]: _type = "Task" [ 1933.774066] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.781856] env[62619]: DEBUG oslo_vmware.api [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778606, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.797586] env[62619]: DEBUG nova.objects.instance [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lazy-loading 'numa_topology' on Instance uuid 13eeb4aa-0f20-4aed-9453-66afb0ff1152 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1933.972730] env[62619]: DEBUG nova.compute.manager [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1933.994127] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a14d90fd-8a4b-4198-98b6-9951b195a32e tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1933.994446] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67cb75ea-002c-4bc9-b9b3-e6fa84dc0858 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.003758] env[62619]: DEBUG oslo_vmware.api [None req-a14d90fd-8a4b-4198-98b6-9951b195a32e tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1934.003758] env[62619]: value = "task-1778607" [ 1934.003758] env[62619]: _type = "Task" [ 1934.003758] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.013529] env[62619]: DEBUG oslo_vmware.api [None req-a14d90fd-8a4b-4198-98b6-9951b195a32e tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778607, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.015761] env[62619]: DEBUG nova.compute.manager [req-8ca25e9c-5c3e-4170-bb90-4c8bf5084844 req-e60690d7-dc29-4066-b3cf-74326089693a service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Received event network-vif-plugged-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1934.015970] env[62619]: DEBUG oslo_concurrency.lockutils [req-8ca25e9c-5c3e-4170-bb90-4c8bf5084844 req-e60690d7-dc29-4066-b3cf-74326089693a service nova] Acquiring lock "a0953370-77f2-4e3b-a92e-cb12b3a82361-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.016479] env[62619]: DEBUG oslo_concurrency.lockutils [req-8ca25e9c-5c3e-4170-bb90-4c8bf5084844 req-e60690d7-dc29-4066-b3cf-74326089693a service nova] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.016734] env[62619]: DEBUG oslo_concurrency.lockutils [req-8ca25e9c-5c3e-4170-bb90-4c8bf5084844 req-e60690d7-dc29-4066-b3cf-74326089693a service nova] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.016947] env[62619]: DEBUG nova.compute.manager [req-8ca25e9c-5c3e-4170-bb90-4c8bf5084844 req-e60690d7-dc29-4066-b3cf-74326089693a service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] No waiting events found dispatching network-vif-plugged-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1934.017147] env[62619]: WARNING nova.compute.manager [req-8ca25e9c-5c3e-4170-bb90-4c8bf5084844 req-e60690d7-dc29-4066-b3cf-74326089693a service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Received unexpected event network-vif-plugged-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c for instance with vm_state shelved_offloaded and task_state spawning. [ 1934.055918] env[62619]: DEBUG nova.compute.manager [req-fec47943-7e85-446d-af60-3953b904cb67 req-9e8219e4-2840-4f5c-a65c-7aac8d50a862 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Received event network-changed-b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1934.056136] env[62619]: DEBUG nova.compute.manager [req-fec47943-7e85-446d-af60-3953b904cb67 req-9e8219e4-2840-4f5c-a65c-7aac8d50a862 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Refreshing instance network info cache due to event network-changed-b52c0c61-cdaa-4ec8-b935-3229b930c548. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1934.056353] env[62619]: DEBUG oslo_concurrency.lockutils [req-fec47943-7e85-446d-af60-3953b904cb67 req-9e8219e4-2840-4f5c-a65c-7aac8d50a862 service nova] Acquiring lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1934.056531] env[62619]: DEBUG oslo_concurrency.lockutils [req-fec47943-7e85-446d-af60-3953b904cb67 req-9e8219e4-2840-4f5c-a65c-7aac8d50a862 service nova] Acquired lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1934.056644] env[62619]: DEBUG nova.network.neutron [req-fec47943-7e85-446d-af60-3953b904cb67 req-9e8219e4-2840-4f5c-a65c-7aac8d50a862 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Refreshing network info cache for port b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1934.109796] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1934.109973] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1934.110137] env[62619]: DEBUG nova.network.neutron [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1934.226142] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778605, 'name': Rename_Task, 'duration_secs': 0.191555} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.226428] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1934.226669] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-921fda85-5c9a-4779-8c18-7d49c80f8df0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.233340] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1934.233340] env[62619]: value = "task-1778608" [ 1934.233340] env[62619]: _type = "Task" [ 1934.233340] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.241384] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778608, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.284245] env[62619]: DEBUG oslo_vmware.api [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778606, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131791} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.284495] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1934.284620] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1934.284814] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1934.284979] env[62619]: INFO nova.compute.manager [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Took 2.26 seconds to destroy the instance on the hypervisor. 
[ 1934.285300] env[62619]: DEBUG oslo.service.loopingcall [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1934.285536] env[62619]: DEBUG nova.compute.manager [-] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1934.285643] env[62619]: DEBUG nova.network.neutron [-] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1934.302083] env[62619]: DEBUG nova.objects.base [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Object Instance<13eeb4aa-0f20-4aed-9453-66afb0ff1152> lazy-loaded attributes: resources,numa_topology {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1934.439542] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b227a543-18e7-46f6-a1a1-473fa3fd3994 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.449950] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879cc5b1-1a49-4b47-b7de-f7ce55462ac9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.501219] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8fda8c6-1bc8-431e-ade0-7b3a22dee188 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.520809] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3c9ab4-7fd9-4bfd-b927-89cf155dc74b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.527118] env[62619]: DEBUG oslo_vmware.api [None req-a14d90fd-8a4b-4198-98b6-9951b195a32e tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778607, 'name': PowerOnVM_Task, 'duration_secs': 0.419008} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.528569] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.529089] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a14d90fd-8a4b-4198-98b6-9951b195a32e tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1934.529282] env[62619]: DEBUG nova.compute.manager [None req-a14d90fd-8a4b-4198-98b6-9951b195a32e tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1934.530968] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ced3f3-86ef-4dd3-b68a-2a076f8bfaf8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.545319] env[62619]: DEBUG nova.compute.provider_tree [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1934.743747] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778608, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.859310] env[62619]: DEBUG nova.network.neutron [req-fec47943-7e85-446d-af60-3953b904cb67 req-9e8219e4-2840-4f5c-a65c-7aac8d50a862 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Updated VIF entry in instance network info cache for port b52c0c61-cdaa-4ec8-b935-3229b930c548. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1934.859786] env[62619]: DEBUG nova.network.neutron [req-fec47943-7e85-446d-af60-3953b904cb67 req-9e8219e4-2840-4f5c-a65c-7aac8d50a862 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Updating instance_info_cache with network_info: [{"id": "b52c0c61-cdaa-4ec8-b935-3229b930c548", "address": "fa:16:3e:58:c2:30", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapb52c0c61-cd", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1935.048464] env[62619]: DEBUG nova.scheduler.client.report [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1935.062115] env[62619]: DEBUG nova.network.neutron [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Updating instance_info_cache with network_info: [{"id": "b78d1b6e-104b-4041-bcc5-5802f9f6fe3c", "address": "fa:16:3e:3b:fe:49", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb78d1b6e-10", 
"ovs_interfaceid": "b78d1b6e-104b-4041-bcc5-5802f9f6fe3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1935.245159] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778608, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.362852] env[62619]: DEBUG oslo_concurrency.lockutils [req-fec47943-7e85-446d-af60-3953b904cb67 req-9e8219e4-2840-4f5c-a65c-7aac8d50a862 service nova] Releasing lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1935.464789] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1935.553655] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.259s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.556263] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.028s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1935.558028] env[62619]: INFO nova.compute.claims [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1935.566026] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1935.583390] env[62619]: DEBUG nova.network.neutron [-] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1935.595416] env[62619]: DEBUG nova.virt.hardware [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='f03eecc99b0458cc1459b7375a034248',container_format='bare',created_at=2024-12-11T22:58:05Z,direct_url=<?>,disk_format='vmdk',id=8f8142ab-6142-4a84-93a9-2b4bd7e31086,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-697611303-shelved',owner='c82fb42e93ff479b971f49eb92f50832',properties=ImageMetaProps,protected=<?>,size=31662080,status='active',tags=<?>,updated_at=2024-12-11T22:58:19Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1935.595649] env[62619]: DEBUG nova.virt.hardware [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1935.595801] env[62619]: DEBUG nova.virt.hardware [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1935.595979] env[62619]: DEBUG nova.virt.hardware [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1935.596469] env[62619]: DEBUG nova.virt.hardware [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1935.596469] env[62619]: DEBUG nova.virt.hardware [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1935.596639] env[62619]: DEBUG nova.virt.hardware [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1935.596728] env[62619]: DEBUG nova.virt.hardware [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1935.597850] env[62619]: DEBUG nova.virt.hardware [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1935.597850] env[62619]: DEBUG 
nova.virt.hardware [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1935.597850] env[62619]: DEBUG nova.virt.hardware [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1935.598102] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77c0054-d832-4637-bcb1-f26e7fdefb21 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.607941] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a8a867-c782-4d10-a227-37ea95123bd8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.621945] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:fe:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '816c6e38-e200-4544-8c5b-9fc3e16c5761', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b78d1b6e-104b-4041-bcc5-5802f9f6fe3c', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1935.629107] env[62619]: DEBUG oslo.service.loopingcall [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1935.629380] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1935.629586] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ca79d5b-5967-4e79-8009-3a9d232c0b02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.649024] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1935.649024] env[62619]: value = "task-1778609" [ 1935.649024] env[62619]: _type = "Task" [ 1935.649024] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.658687] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778609, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.746074] env[62619]: DEBUG oslo_vmware.api [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778608, 'name': PowerOnVM_Task, 'duration_secs': 1.220518} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.746074] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1935.746074] env[62619]: INFO nova.compute.manager [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Took 8.28 seconds to spawn the instance on the hypervisor. [ 1935.746074] env[62619]: DEBUG nova.compute.manager [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1935.746644] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7de0c55-51c9-43c3-8472-367c45f61bb0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.042371] env[62619]: DEBUG nova.compute.manager [req-4ecd068c-b67e-4221-9478-91f6593dc079 req-60809d06-d94d-42c2-a5ac-f62d909553a5 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Received event network-changed-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1936.042574] env[62619]: DEBUG nova.compute.manager [req-4ecd068c-b67e-4221-9478-91f6593dc079 req-60809d06-d94d-42c2-a5ac-f62d909553a5 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Refreshing instance network info cache due to event network-changed-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1936.042785] env[62619]: DEBUG oslo_concurrency.lockutils [req-4ecd068c-b67e-4221-9478-91f6593dc079 req-60809d06-d94d-42c2-a5ac-f62d909553a5 service nova] Acquiring lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1936.042926] env[62619]: DEBUG oslo_concurrency.lockutils [req-4ecd068c-b67e-4221-9478-91f6593dc079 req-60809d06-d94d-42c2-a5ac-f62d909553a5 service nova] Acquired lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1936.043100] env[62619]: DEBUG nova.network.neutron [req-4ecd068c-b67e-4221-9478-91f6593dc079 req-60809d06-d94d-42c2-a5ac-f62d909553a5 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Refreshing network info cache for port b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1936.065728] env[62619]: DEBUG oslo_concurrency.lockutils [None req-75fdc1fa-e61c-4fb8-b3ca-cd718904a235 tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 23.209s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.067299] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 0.602s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1936.067299] env[62619]: INFO nova.compute.manager [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Unshelving [ 1936.086477] env[62619]: INFO nova.compute.manager [-] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Took 1.80 seconds to deallocate network for instance. [ 1936.159027] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778609, 'name': CreateVM_Task, 'duration_secs': 0.332667} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.159226] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1936.160032] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8f8142ab-6142-4a84-93a9-2b4bd7e31086" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1936.160032] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8f8142ab-6142-4a84-93a9-2b4bd7e31086" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1936.160421] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8f8142ab-6142-4a84-93a9-2b4bd7e31086" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1936.160672] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-738b1bcd-3cac-4fca-8841-7c5293962880 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.165605] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1936.165605] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527a2105-0484-b18f-3212-6b96b176c4bc" [ 1936.165605] env[62619]: _type = "Task" [ 1936.165605] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.173522] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527a2105-0484-b18f-3212-6b96b176c4bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.264115] env[62619]: INFO nova.compute.manager [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Took 13.89 seconds to build instance. [ 1936.632877] env[62619]: INFO nova.compute.manager [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Took 0.55 seconds to detach 1 volumes for instance. 
[ 1936.636842] env[62619]: DEBUG nova.compute.manager [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Deleting volume: d3e2944a-ae36-4f80-82db-68a3de2d143e {{(pid=62619) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1936.681777] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8f8142ab-6142-4a84-93a9-2b4bd7e31086" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1936.682394] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Processing image 8f8142ab-6142-4a84-93a9-2b4bd7e31086 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1936.682394] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8f8142ab-6142-4a84-93a9-2b4bd7e31086/8f8142ab-6142-4a84-93a9-2b4bd7e31086.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1936.682660] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8f8142ab-6142-4a84-93a9-2b4bd7e31086/8f8142ab-6142-4a84-93a9-2b4bd7e31086.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1936.682923] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1936.686796] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-438a6de5-f118-4d37-8079-1ec4741ab3f2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.698434] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1936.699923] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1936.699923] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-668953be-d7e2-48d1-bcb2-05be0cc8eea0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.707456] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1936.707456] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52aee298-ec07-b2d5-6646-0d96ffd59f23" [ 1936.707456] env[62619]: _type = "Task" [ 1936.707456] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.711392] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1936.711564] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1936.716848] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52aee298-ec07-b2d5-6646-0d96ffd59f23, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.748441] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9f1f75-8c14-493a-809e-3e5f8ee2d00b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.757211] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c8b0f0-d4c3-4bf1-8410-02e66e68f84f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.792296] env[62619]: DEBUG oslo_concurrency.lockutils [None req-190ff5c2-d8ae-49f1-9fd3-353ccacc22d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 15.429s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.798224] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8dfba0-b8f3-4498-b30a-1754761f1469 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.806589] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83edde43-a713-41cd-b459-b061ba4f8d67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.823392] env[62619]: DEBUG nova.compute.provider_tree [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1936.875387] env[62619]: DEBUG nova.network.neutron [req-4ecd068c-b67e-4221-9478-91f6593dc079 req-60809d06-d94d-42c2-a5ac-f62d909553a5 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Updated VIF entry in instance network info cache for port b78d1b6e-104b-4041-bcc5-5802f9f6fe3c. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1936.875766] env[62619]: DEBUG nova.network.neutron [req-4ecd068c-b67e-4221-9478-91f6593dc079 req-60809d06-d94d-42c2-a5ac-f62d909553a5 service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Updating instance_info_cache with network_info: [{"id": "b78d1b6e-104b-4041-bcc5-5802f9f6fe3c", "address": "fa:16:3e:3b:fe:49", "network": {"id": "1b45dec3-d749-4aa4-a4bc-bc36e82f1686", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-104522219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c82fb42e93ff479b971f49eb92f50832", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb78d1b6e-10", "ovs_interfaceid": "b78d1b6e-104b-4041-bcc5-5802f9f6fe3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1937.091557] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1937.190930] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1937.219027] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Preparing fetch location {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1937.219027] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Fetch image to [datastore1] OSTACK_IMG_e7c9a38b-cc97-4b06-bde9-2604279a187c/OSTACK_IMG_e7c9a38b-cc97-4b06-bde9-2604279a187c.vmdk {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1937.219027] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 
tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Downloading stream optimized image 8f8142ab-6142-4a84-93a9-2b4bd7e31086 to [datastore1] OSTACK_IMG_e7c9a38b-cc97-4b06-bde9-2604279a187c/OSTACK_IMG_e7c9a38b-cc97-4b06-bde9-2604279a187c.vmdk on the data store datastore1 as vApp {{(pid=62619) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1937.219027] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Downloading image file data 8f8142ab-6142-4a84-93a9-2b4bd7e31086 to the ESX as VM named 'OSTACK_IMG_e7c9a38b-cc97-4b06-bde9-2604279a187c' {{(pid=62619) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1937.246887] env[62619]: DEBUG nova.compute.manager [req-7b610396-cb27-42c1-a0ac-546d9f8f5163 req-fc0a61a0-0b6f-418e-8251-426954918942 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Received event network-changed-35375282-c697-45eb-a87c-d85555a9012b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1937.247085] env[62619]: DEBUG nova.compute.manager [req-7b610396-cb27-42c1-a0ac-546d9f8f5163 req-fc0a61a0-0b6f-418e-8251-426954918942 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Refreshing instance network info cache due to event network-changed-35375282-c697-45eb-a87c-d85555a9012b. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1937.247486] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b610396-cb27-42c1-a0ac-546d9f8f5163 req-fc0a61a0-0b6f-418e-8251-426954918942 service nova] Acquiring lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1937.247636] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b610396-cb27-42c1-a0ac-546d9f8f5163 req-fc0a61a0-0b6f-418e-8251-426954918942 service nova] Acquired lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1937.247798] env[62619]: DEBUG nova.network.neutron [req-7b610396-cb27-42c1-a0ac-546d9f8f5163 req-fc0a61a0-0b6f-418e-8251-426954918942 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Refreshing network info cache for port 35375282-c697-45eb-a87c-d85555a9012b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1937.311702] env[62619]: DEBUG oslo_vmware.rw_handles [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1937.311702] env[62619]: value = "resgroup-9" [ 1937.311702] env[62619]: _type = "ResourcePool" [ 1937.311702] env[62619]: }. 
{{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1937.312018] env[62619]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b90bd3fd-637a-4f3b-b212-d0e6682196ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.329306] env[62619]: DEBUG nova.scheduler.client.report [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1937.338883] env[62619]: DEBUG oslo_vmware.rw_handles [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lease: (returnval){ [ 1937.338883] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e4e955-eae4-4c23-79c2-4d92e49168a8" [ 1937.338883] env[62619]: _type = "HttpNfcLease" [ 1937.338883] env[62619]: } obtained for vApp import into resource pool (val){ [ 1937.338883] env[62619]: value = "resgroup-9" [ 1937.338883] env[62619]: _type = "ResourcePool" [ 1937.338883] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1937.339306] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the lease: (returnval){ [ 1937.339306] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e4e955-eae4-4c23-79c2-4d92e49168a8" [ 1937.339306] env[62619]: _type = "HttpNfcLease" [ 1937.339306] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1937.347573] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1937.347573] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e4e955-eae4-4c23-79c2-4d92e49168a8" [ 1937.347573] env[62619]: _type = "HttpNfcLease" [ 1937.347573] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1937.378228] env[62619]: DEBUG oslo_concurrency.lockutils [req-4ecd068c-b67e-4221-9478-91f6593dc079 req-60809d06-d94d-42c2-a5ac-f62d909553a5 service nova] Releasing lock "refresh_cache-a0953370-77f2-4e3b-a92e-cb12b3a82361" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1937.378504] env[62619]: DEBUG nova.compute.manager [req-4ecd068c-b67e-4221-9478-91f6593dc079 req-60809d06-d94d-42c2-a5ac-f62d909553a5 service nova] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Received event network-vif-deleted-ce3520ea-f75e-4d6a-a27f-de90d6383823 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1937.834858] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.279s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1937.835414] env[62619]: DEBUG nova.compute.manager [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1937.840297] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.749s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1937.840513] env[62619]: DEBUG nova.objects.instance [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lazy-loading 'pci_requests' on Instance uuid 13eeb4aa-0f20-4aed-9453-66afb0ff1152 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1937.849486] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1937.849486] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e4e955-eae4-4c23-79c2-4d92e49168a8" [ 1937.849486] env[62619]: _type = "HttpNfcLease" [ 1937.849486] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1938.161944] env[62619]: DEBUG nova.network.neutron [req-7b610396-cb27-42c1-a0ac-546d9f8f5163 req-fc0a61a0-0b6f-418e-8251-426954918942 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updated VIF entry in instance network info cache for port 35375282-c697-45eb-a87c-d85555a9012b. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1938.162345] env[62619]: DEBUG nova.network.neutron [req-7b610396-cb27-42c1-a0ac-546d9f8f5163 req-fc0a61a0-0b6f-418e-8251-426954918942 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updating instance_info_cache with network_info: [{"id": "35375282-c697-45eb-a87c-d85555a9012b", "address": "fa:16:3e:87:9d:97", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35375282-c6", "ovs_interfaceid": "35375282-c697-45eb-a87c-d85555a9012b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1938.343760] env[62619]: DEBUG nova.compute.utils [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1938.349311] env[62619]: DEBUG nova.compute.manager [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Not allocating networking since 'none' was specified. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1938.351511] env[62619]: DEBUG nova.objects.instance [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lazy-loading 'numa_topology' on Instance uuid 13eeb4aa-0f20-4aed-9453-66afb0ff1152 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1938.359752] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1938.359752] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e4e955-eae4-4c23-79c2-4d92e49168a8" [ 1938.359752] env[62619]: _type = "HttpNfcLease" [ 1938.359752] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1938.668382] env[62619]: DEBUG oslo_concurrency.lockutils [req-7b610396-cb27-42c1-a0ac-546d9f8f5163 req-fc0a61a0-0b6f-418e-8251-426954918942 service nova] Releasing lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.851625] env[62619]: DEBUG nova.compute.manager [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1938.854173] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1938.854173] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e4e955-eae4-4c23-79c2-4d92e49168a8" [ 1938.854173] env[62619]: _type = "HttpNfcLease" [ 1938.854173] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1938.855502] env[62619]: INFO nova.compute.claims [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1938.858347] env[62619]: DEBUG nova.compute.manager [req-9fd6e411-17c1-460d-8ef5-f9a79151f3d3 req-031e46fc-64b2-4f2e-9f46-2e53e1b9e240 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Received event network-changed-56f48ead-6f6e-451e-af3c-2634f9797c5e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1938.859008] env[62619]: DEBUG nova.compute.manager [req-9fd6e411-17c1-460d-8ef5-f9a79151f3d3 req-031e46fc-64b2-4f2e-9f46-2e53e1b9e240 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Refreshing instance network info cache due to event network-changed-56f48ead-6f6e-451e-af3c-2634f9797c5e. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1938.859008] env[62619]: DEBUG oslo_concurrency.lockutils [req-9fd6e411-17c1-460d-8ef5-f9a79151f3d3 req-031e46fc-64b2-4f2e-9f46-2e53e1b9e240 service nova] Acquiring lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1938.859008] env[62619]: DEBUG oslo_concurrency.lockutils [req-9fd6e411-17c1-460d-8ef5-f9a79151f3d3 req-031e46fc-64b2-4f2e-9f46-2e53e1b9e240 service nova] Acquired lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1938.859008] env[62619]: DEBUG nova.network.neutron [req-9fd6e411-17c1-460d-8ef5-f9a79151f3d3 req-031e46fc-64b2-4f2e-9f46-2e53e1b9e240 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Refreshing network info cache for port 56f48ead-6f6e-451e-af3c-2634f9797c5e {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1939.303995] env[62619]: DEBUG nova.compute.manager [req-c45c446f-0ed5-4079-bf9a-71f563363908 req-42eed889-cdfd-4acc-b957-4e416c7ae15f service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Received event network-changed-56f48ead-6f6e-451e-af3c-2634f9797c5e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1939.303995] env[62619]: DEBUG nova.compute.manager [req-c45c446f-0ed5-4079-bf9a-71f563363908 req-42eed889-cdfd-4acc-b957-4e416c7ae15f service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Refreshing instance network info cache due to event network-changed-56f48ead-6f6e-451e-af3c-2634f9797c5e. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1939.303995] env[62619]: DEBUG oslo_concurrency.lockutils [req-c45c446f-0ed5-4079-bf9a-71f563363908 req-42eed889-cdfd-4acc-b957-4e416c7ae15f service nova] Acquiring lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1939.351662] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1939.351662] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e4e955-eae4-4c23-79c2-4d92e49168a8" [ 1939.351662] env[62619]: _type = "HttpNfcLease" [ 1939.351662] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1939.569767] env[62619]: DEBUG nova.network.neutron [req-9fd6e411-17c1-460d-8ef5-f9a79151f3d3 req-031e46fc-64b2-4f2e-9f46-2e53e1b9e240 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Updated VIF entry in instance network info cache for port 56f48ead-6f6e-451e-af3c-2634f9797c5e. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1939.570059] env[62619]: DEBUG nova.network.neutron [req-9fd6e411-17c1-460d-8ef5-f9a79151f3d3 req-031e46fc-64b2-4f2e-9f46-2e53e1b9e240 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Updating instance_info_cache with network_info: [{"id": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "address": "fa:16:3e:8b:e2:fc", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f48ead-6f", "ovs_interfaceid": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1939.853614] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1939.853614] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e4e955-eae4-4c23-79c2-4d92e49168a8" [ 1939.853614] env[62619]: _type = "HttpNfcLease" [ 1939.853614] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1939.853967] env[62619]: DEBUG oslo_vmware.rw_handles [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1939.853967] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e4e955-eae4-4c23-79c2-4d92e49168a8" [ 1939.853967] env[62619]: _type = "HttpNfcLease" [ 1939.853967] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1939.854681] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab9b223-086c-48e7-982a-31730c3d46ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.863240] env[62619]: DEBUG nova.compute.manager [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1939.865240] env[62619]: DEBUG oslo_vmware.rw_handles [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526bb7d1-6cce-89bd-60e5-ab4114f5097f/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1939.865434] env[62619]: DEBUG oslo_vmware.rw_handles [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Creating HTTP connection to write to file with size = 31662080 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526bb7d1-6cce-89bd-60e5-ab4114f5097f/disk-0.vmdk. {{(pid=62619) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1939.932616] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6a8aeb83-4066-4239-8317-a6fe6ed556cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.938261] env[62619]: DEBUG nova.virt.hardware [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1939.938545] env[62619]: DEBUG nova.virt.hardware [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1939.938707] env[62619]: DEBUG nova.virt.hardware [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1939.938887] env[62619]: DEBUG nova.virt.hardware [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1939.939041] env[62619]: DEBUG nova.virt.hardware [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1939.939189] env[62619]: DEBUG nova.virt.hardware [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1939.939424] env[62619]: DEBUG nova.virt.hardware [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1939.939599] env[62619]: DEBUG nova.virt.hardware [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1939.939782] env[62619]: DEBUG nova.virt.hardware [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1939.939956] env[62619]: DEBUG nova.virt.hardware [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1939.940171] env[62619]: DEBUG nova.virt.hardware [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1939.942516] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73a1b73-2562-4edc-8807-4d27ff28fb7a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.954480] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34519be-0176-45d8-aded-6e99e6fceaff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.971907] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1939.977792] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Creating folder: Project (b49413cd1cf14d19a96e9590c22e4203). Parent ref: group-v368875. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1939.980757] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-904ef004-3b54-4025-9e5c-f17789617061 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.992965] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Created folder: Project (b49413cd1cf14d19a96e9590c22e4203) in parent group-v368875. [ 1939.993160] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Creating folder: Instances. Parent ref: group-v369187. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1939.995958] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d2fd7e9-6cac-4fe9-9d02-5cb0193978f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.005748] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Created folder: Instances in parent group-v369187. [ 1940.006021] env[62619]: DEBUG oslo.service.loopingcall [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1940.006238] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1940.006447] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2bf5c67-2f5a-4d1b-862f-722b340a430d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.028431] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1940.028431] env[62619]: value = "task-1778614" [ 1940.028431] env[62619]: _type = "Task" [ 1940.028431] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.036037] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778614, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.072150] env[62619]: DEBUG oslo_concurrency.lockutils [req-9fd6e411-17c1-460d-8ef5-f9a79151f3d3 req-031e46fc-64b2-4f2e-9f46-2e53e1b9e240 service nova] Releasing lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1940.072758] env[62619]: DEBUG oslo_concurrency.lockutils [req-c45c446f-0ed5-4079-bf9a-71f563363908 req-42eed889-cdfd-4acc-b957-4e416c7ae15f service nova] Acquired lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1940.072949] env[62619]: DEBUG nova.network.neutron [req-c45c446f-0ed5-4079-bf9a-71f563363908 req-42eed889-cdfd-4acc-b957-4e416c7ae15f service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Refreshing network info cache for port 56f48ead-6f6e-451e-af3c-2634f9797c5e {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1940.083330] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1daa3588-ff70-48c4-91cc-8987fe3966ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.092348] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47cf2b33-5372-4392-8ec3-59e108c4f3e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.125319] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c3c274-1dfc-4133-98b0-91befdae8ef7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.133689] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb21eb4-df45-4f4f-a065-256f5f360530 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.147595] env[62619]: DEBUG nova.compute.provider_tree [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1940.543290] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778614, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.651124] env[62619]: DEBUG nova.scheduler.client.report [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1940.823551] env[62619]: DEBUG nova.network.neutron [req-c45c446f-0ed5-4079-bf9a-71f563363908 req-42eed889-cdfd-4acc-b957-4e416c7ae15f service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Updated VIF entry in instance network info cache for port 56f48ead-6f6e-451e-af3c-2634f9797c5e. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1940.823959] env[62619]: DEBUG nova.network.neutron [req-c45c446f-0ed5-4079-bf9a-71f563363908 req-42eed889-cdfd-4acc-b957-4e416c7ae15f service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Updating instance_info_cache with network_info: [{"id": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "address": "fa:16:3e:8b:e2:fc", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f48ead-6f", "ovs_interfaceid": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1940.881257] env[62619]: DEBUG nova.compute.manager [req-11edcf5f-1e38-41cc-b114-cff243bacfaf req-7e1a5214-7981-4b2c-8fdc-457e7f7f2380 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Received event network-changed-35375282-c697-45eb-a87c-d85555a9012b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1940.881581] env[62619]: DEBUG nova.compute.manager [req-11edcf5f-1e38-41cc-b114-cff243bacfaf req-7e1a5214-7981-4b2c-8fdc-457e7f7f2380 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Refreshing instance network info cache due to event network-changed-35375282-c697-45eb-a87c-d85555a9012b. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1940.881842] env[62619]: DEBUG oslo_concurrency.lockutils [req-11edcf5f-1e38-41cc-b114-cff243bacfaf req-7e1a5214-7981-4b2c-8fdc-457e7f7f2380 service nova] Acquiring lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1940.881989] env[62619]: DEBUG oslo_concurrency.lockutils [req-11edcf5f-1e38-41cc-b114-cff243bacfaf req-7e1a5214-7981-4b2c-8fdc-457e7f7f2380 service nova] Acquired lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1940.882164] env[62619]: DEBUG nova.network.neutron [req-11edcf5f-1e38-41cc-b114-cff243bacfaf req-7e1a5214-7981-4b2c-8fdc-457e7f7f2380 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Refreshing network info cache for port 35375282-c697-45eb-a87c-d85555a9012b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1941.039489] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778614, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.156507] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.316s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.159021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.968s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.159299] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.179742] env[62619]: INFO nova.scheduler.client.report [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleted allocations for instance 21d9fc7a-228e-4b33-8534-55285d4e6e96 [ 1941.191303] env[62619]: INFO nova.network.neutron [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Updating port b52c0c61-cdaa-4ec8-b935-3229b930c548 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1941.235043] env[62619]: DEBUG oslo_vmware.rw_handles [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Completed reading data from the image iterator. 
{{(pid=62619) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1941.235226] env[62619]: DEBUG oslo_vmware.rw_handles [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526bb7d1-6cce-89bd-60e5-ab4114f5097f/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1941.236446] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d0b8be-2921-45a6-9059-bbeb305c36e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.243851] env[62619]: DEBUG oslo_vmware.rw_handles [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526bb7d1-6cce-89bd-60e5-ab4114f5097f/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1941.244036] env[62619]: DEBUG oslo_vmware.rw_handles [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526bb7d1-6cce-89bd-60e5-ab4114f5097f/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1941.244517] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-8f7e320f-abe1-4064-9f67-3fd7a13368c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.327543] env[62619]: DEBUG oslo_concurrency.lockutils [req-c45c446f-0ed5-4079-bf9a-71f563363908 req-42eed889-cdfd-4acc-b957-4e416c7ae15f service nova] Releasing lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1941.540150] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778614, 'name': CreateVM_Task, 'duration_secs': 1.341875} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.540338] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1941.540843] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1941.541018] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1941.541338] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1941.541609] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b4fde81-af67-4d1e-8034-197565573ca1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.546178] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1941.546178] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520108e1-b0f9-63f5-2f75-2cff7d9f4c6a" [ 1941.546178] env[62619]: _type = "Task" [ 1941.546178] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.555583] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520108e1-b0f9-63f5-2f75-2cff7d9f4c6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.596160] env[62619]: DEBUG oslo_vmware.rw_handles [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526bb7d1-6cce-89bd-60e5-ab4114f5097f/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1941.596390] env[62619]: INFO nova.virt.vmwareapi.images [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Downloaded image file data 8f8142ab-6142-4a84-93a9-2b4bd7e31086 [ 1941.597264] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bafc7f33-00a8-4fcb-be9e-a16fcd4d4ccc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.612755] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a64ec686-fae2-413d-8b0d-698c85c80796 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.689115] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d46f7bf0-4c3c-49aa-b692-8063556cc92f tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "21d9fc7a-228e-4b33-8534-55285d4e6e96" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.171s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.694764] env[62619]: DEBUG nova.network.neutron [req-11edcf5f-1e38-41cc-b114-cff243bacfaf req-7e1a5214-7981-4b2c-8fdc-457e7f7f2380 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updated VIF entry in instance network info cache for port 35375282-c697-45eb-a87c-d85555a9012b. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1941.695141] env[62619]: DEBUG nova.network.neutron [req-11edcf5f-1e38-41cc-b114-cff243bacfaf req-7e1a5214-7981-4b2c-8fdc-457e7f7f2380 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updating instance_info_cache with network_info: [{"id": "35375282-c697-45eb-a87c-d85555a9012b", "address": "fa:16:3e:87:9d:97", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35375282-c6", "ovs_interfaceid": "35375282-c697-45eb-a87c-d85555a9012b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1941.709293] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task 
ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1941.712878] env[62619]: INFO nova.virt.vmwareapi.images [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] The imported VM was unregistered [ 1941.715258] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Caching image {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1941.715455] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Creating directory with path [datastore1] devstack-image-cache_base/8f8142ab-6142-4a84-93a9-2b4bd7e31086 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1941.715709] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea5777c2-41b7-42a9-91ce-6021869aae5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.744897] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Created directory with path [datastore1] devstack-image-cache_base/8f8142ab-6142-4a84-93a9-2b4bd7e31086 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1941.745117] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_e7c9a38b-cc97-4b06-bde9-2604279a187c/OSTACK_IMG_e7c9a38b-cc97-4b06-bde9-2604279a187c.vmdk to [datastore1] devstack-image-cache_base/8f8142ab-6142-4a84-93a9-2b4bd7e31086/8f8142ab-6142-4a84-93a9-2b4bd7e31086.vmdk. {{(pid=62619) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1941.745381] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-306eab2b-0394-4e9e-8727-b6b43e2df135 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.753155] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1941.753155] env[62619]: value = "task-1778616" [ 1941.753155] env[62619]: _type = "Task" [ 1941.753155] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.760774] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778616, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.058092] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]520108e1-b0f9-63f5-2f75-2cff7d9f4c6a, 'name': SearchDatastore_Task, 'duration_secs': 0.02708} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.058438] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1942.058627] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1942.058881] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1942.059043] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1942.059235] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1942.059519] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06bed67b-71eb-4b95-87fb-3e80a84e9216 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.071613] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1942.071785] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1942.072994] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6b98637-6ba0-46dd-87cb-cbe3c9062ccf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.078030] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1942.078030] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5259a5e6-164b-b3eb-f0aa-df79f46385d0" [ 1942.078030] env[62619]: _type = "Task" [ 1942.078030] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.086418] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5259a5e6-164b-b3eb-f0aa-df79f46385d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.198249] env[62619]: DEBUG oslo_concurrency.lockutils [req-11edcf5f-1e38-41cc-b114-cff243bacfaf req-7e1a5214-7981-4b2c-8fdc-457e7f7f2380 service nova] Releasing lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1942.263323] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778616, 'name': MoveVirtualDisk_Task} progress is 12%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.574191] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "70265068-1185-4f23-b0b4-ed2378c17a89" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1942.574502] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "70265068-1185-4f23-b0b4-ed2378c17a89" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1942.574731] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "70265068-1185-4f23-b0b4-ed2378c17a89-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1942.574917] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "70265068-1185-4f23-b0b4-ed2378c17a89-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1942.575122] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "70265068-1185-4f23-b0b4-ed2378c17a89-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1942.577416] env[62619]: INFO nova.compute.manager [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Terminating instance [ 1942.589379] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5259a5e6-164b-b3eb-f0aa-df79f46385d0, 'name': SearchDatastore_Task, 'duration_secs': 0.040379} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.591015] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd4d9895-1a44-465a-9ab4-9d63279a3ca8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.597514] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1942.597514] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525dc453-70d0-9d60-8ce9-f80e6b51dc5c" [ 1942.597514] env[62619]: _type = "Task" [ 1942.597514] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.605849] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525dc453-70d0-9d60-8ce9-f80e6b51dc5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.709331] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1942.709620] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1942.763912] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778616, 'name': MoveVirtualDisk_Task} progress is 35%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.888672] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1942.888672] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1942.888672] env[62619]: DEBUG nova.network.neutron [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1942.911559] env[62619]: DEBUG nova.compute.manager [req-78c4d07f-335f-4248-898c-48550c4d736c req-74a739a8-eb07-4719-b388-6479fafac096 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Received event network-vif-plugged-b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1942.911688] env[62619]: DEBUG oslo_concurrency.lockutils [req-78c4d07f-335f-4248-898c-48550c4d736c req-74a739a8-eb07-4719-b388-6479fafac096 service nova] Acquiring lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1942.911889] env[62619]: DEBUG oslo_concurrency.lockutils [req-78c4d07f-335f-4248-898c-48550c4d736c req-74a739a8-eb07-4719-b388-6479fafac096 service nova] Lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1942.912112] env[62619]: DEBUG oslo_concurrency.lockutils [req-78c4d07f-335f-4248-898c-48550c4d736c req-74a739a8-eb07-4719-b388-6479fafac096 service nova] Lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1942.912286] env[62619]: DEBUG nova.compute.manager [req-78c4d07f-335f-4248-898c-48550c4d736c req-74a739a8-eb07-4719-b388-6479fafac096 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] No waiting events found dispatching network-vif-plugged-b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1942.912449] env[62619]: WARNING nova.compute.manager [req-78c4d07f-335f-4248-898c-48550c4d736c req-74a739a8-eb07-4719-b388-6479fafac096 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Received unexpected event network-vif-plugged-b52c0c61-cdaa-4ec8-b935-3229b930c548 for instance with vm_state shelved_offloaded and task_state 
spawning. [ 1942.912610] env[62619]: DEBUG nova.compute.manager [req-78c4d07f-335f-4248-898c-48550c4d736c req-74a739a8-eb07-4719-b388-6479fafac096 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Received event network-changed-b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1942.912759] env[62619]: DEBUG nova.compute.manager [req-78c4d07f-335f-4248-898c-48550c4d736c req-74a739a8-eb07-4719-b388-6479fafac096 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Refreshing instance network info cache due to event network-changed-b52c0c61-cdaa-4ec8-b935-3229b930c548. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1942.912923] env[62619]: DEBUG oslo_concurrency.lockutils [req-78c4d07f-335f-4248-898c-48550c4d736c req-74a739a8-eb07-4719-b388-6479fafac096 service nova] Acquiring lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1943.084273] env[62619]: DEBUG nova.compute.manager [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1943.084688] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1943.085444] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae78f85-24bb-46ff-aa62-f108e40e5deb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.093728] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1943.093992] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85770c58-f5f8-4718-9c38-ec1220382ba1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.103119] env[62619]: DEBUG oslo_vmware.api [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1943.103119] env[62619]: value = "task-1778617" [ 1943.103119] env[62619]: _type = "Task" [ 1943.103119] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.111344] env[62619]: DEBUG oslo_vmware.api [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778617, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.116033] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525dc453-70d0-9d60-8ce9-f80e6b51dc5c, 'name': SearchDatastore_Task, 'duration_secs': 0.080051} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.116370] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1943.116720] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 314758ce-6522-47cf-8445-0d28b1b085b9/314758ce-6522-47cf-8445-0d28b1b085b9.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1943.117138] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-671bb320-063e-40f2-87a5-73ec601a4211 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.129506] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1943.129506] env[62619]: value = "task-1778618" [ 1943.129506] env[62619]: _type = "Task" [ 1943.129506] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.141264] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778618, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.212522] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1943.212784] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1943.212984] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1943.213238] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1943.214208] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba820728-e6d0-46f7-b386-2e7e848d6af1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.227090] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59ecfc6-25cf-415a-b8bd-29c953e8a461 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.249547] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a713e0-6932-42d8-84ed-36b5e98572be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.265717] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7392dcd3-6ea8-4b8b-974a-f3dc5348c7b3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.269358] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778616, 'name': MoveVirtualDisk_Task} progress is 54%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.298219] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179391MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1943.298380] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1943.298588] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1943.614342] env[62619]: DEBUG oslo_vmware.api [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778617, 'name': PowerOffVM_Task, 'duration_secs': 0.313486} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.614629] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1943.614795] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1943.615065] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1de555d2-5808-4b45-b702-a7e9e6bc8737 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.645162] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778618, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.668734] env[62619]: DEBUG nova.network.neutron [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Updating instance_info_cache with network_info: [{"id": "b52c0c61-cdaa-4ec8-b935-3229b930c548", "address": "fa:16:3e:58:c2:30", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb52c0c61-cd", "ovs_interfaceid": "b52c0c61-cdaa-4ec8-b935-3229b930c548", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1943.766313] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1943.766575] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1943.766759] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleting the datastore file [datastore1] 70265068-1185-4f23-b0b4-ed2378c17a89 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1943.770159] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6fefbb8d-5440-4c1b-a259-92a0c50e51d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.772157] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778616, 'name': MoveVirtualDisk_Task} progress is 77%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.778952] env[62619]: DEBUG oslo_vmware.api [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1943.778952] env[62619]: value = "task-1778620" [ 1943.778952] env[62619]: _type = "Task" [ 1943.778952] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.788921] env[62619]: DEBUG oslo_vmware.api [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778620, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.143439] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778618, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.171357] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1944.173912] env[62619]: DEBUG oslo_concurrency.lockutils [req-78c4d07f-335f-4248-898c-48550c4d736c req-74a739a8-eb07-4719-b388-6479fafac096 service nova] Acquired lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1944.174137] env[62619]: DEBUG nova.network.neutron [req-78c4d07f-335f-4248-898c-48550c4d736c req-74a739a8-eb07-4719-b388-6479fafac096 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Refreshing network info cache for port b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1944.203394] env[62619]: DEBUG nova.virt.hardware [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='61e8f5dc3cd2c4cb834b3d220022d947',container_format='bare',created_at=2024-12-11T22:58:14Z,direct_url=,disk_format='vmdk',id=d6107f81-5dc7-4bd9-877a-caaf1e8d2265,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-217209062-shelved',owner='0a8f5f9386ba4dfa869c288a30aaeada',properties=ImageMetaProps,protected=,size=31662080,status='active',tags=,updated_at=2024-12-11T22:58:29Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1944.203664] env[62619]: DEBUG nova.virt.hardware [None 
req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1944.203822] env[62619]: DEBUG nova.virt.hardware [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1944.204100] env[62619]: DEBUG nova.virt.hardware [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1944.204273] env[62619]: DEBUG nova.virt.hardware [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1944.204429] env[62619]: DEBUG nova.virt.hardware [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1944.204632] env[62619]: DEBUG nova.virt.hardware [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1944.204785] env[62619]: DEBUG nova.virt.hardware [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1944.205016] env[62619]: DEBUG nova.virt.hardware [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1944.205232] env[62619]: DEBUG nova.virt.hardware [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1944.205407] env[62619]: DEBUG nova.virt.hardware [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1944.206415] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7afc7ce-2261-44fb-bd1b-7c65525d32db {{(pid=62619) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.218410] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db54147-7689-41f0-945d-3a4db1a8c4e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.236224] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:c2:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b52c0c61-cdaa-4ec8-b935-3229b930c548', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1944.243854] env[62619]: DEBUG oslo.service.loopingcall [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1944.244194] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1944.244443] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-438e3965-2474-4f25-bdae-50434c90fcbe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.268129] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778616, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.272077] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1944.272077] env[62619]: value = "task-1778621" [ 1944.272077] env[62619]: _type = "Task" [ 1944.272077] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.286221] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778621, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.291388] env[62619]: DEBUG oslo_vmware.api [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778620, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.329706] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1944.329706] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 70265068-1185-4f23-b0b4-ed2378c17a89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1944.329706] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4c66bbdf-af6a-4705-8219-85cf19f8314e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1944.329706] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance cec0ea75-042d-4ee5-91d5-cad86456ab97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1944.329706] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance a2064f8f-b928-44c0-86d8-c0bb9882dbde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1944.329993] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 9e69e4ab-f219-4ba8-8f66-6063d8ab242a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1944.329993] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance a0953370-77f2-4e3b-a92e-cb12b3a82361 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1944.329993] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 314758ce-6522-47cf-8445-0d28b1b085b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1944.330244] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 13eeb4aa-0f20-4aed-9453-66afb0ff1152 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1944.330383] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1944.330588] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1944.456762] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253d5a86-6a48-4537-9b72-4b62476f447a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.464871] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcfcf69d-3a5b-4a65-893f-177763ce9b2f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.497816] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6d6beb-21e8-4e1d-b7b4-582f27729044 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.507227] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18740e4-88bf-4809-8b39-7df94aea4b17 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.522950] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1944.641409] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778618, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.770062] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778616, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.709353} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.770390] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_e7c9a38b-cc97-4b06-bde9-2604279a187c/OSTACK_IMG_e7c9a38b-cc97-4b06-bde9-2604279a187c.vmdk to [datastore1] devstack-image-cache_base/8f8142ab-6142-4a84-93a9-2b4bd7e31086/8f8142ab-6142-4a84-93a9-2b4bd7e31086.vmdk. 
[ 1944.770590] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Cleaning up location [datastore1] OSTACK_IMG_e7c9a38b-cc97-4b06-bde9-2604279a187c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1944.770736] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_e7c9a38b-cc97-4b06-bde9-2604279a187c {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1944.770986] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2348ba40-7092-4e8a-ab89-9662a8528683 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.781529] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778621, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.785289] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1944.785289] env[62619]: value = "task-1778622" [ 1944.785289] env[62619]: _type = "Task" [ 1944.785289] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.791114] env[62619]: DEBUG oslo_vmware.api [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778620, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.940356} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.791658] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1944.791837] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1944.792027] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1944.792193] env[62619]: INFO nova.compute.manager [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Took 1.71 seconds to destroy the instance on the hypervisor. 
[ 1944.792421] env[62619]: DEBUG oslo.service.loopingcall [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1944.792939] env[62619]: DEBUG nova.compute.manager [-] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1944.793036] env[62619]: DEBUG nova.network.neutron [-] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1944.797018] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778622, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.025945] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1945.035254] env[62619]: DEBUG nova.network.neutron [req-78c4d07f-335f-4248-898c-48550c4d736c req-74a739a8-eb07-4719-b388-6479fafac096 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Updated VIF entry in instance network info cache for port b52c0c61-cdaa-4ec8-b935-3229b930c548. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1945.035602] env[62619]: DEBUG nova.network.neutron [req-78c4d07f-335f-4248-898c-48550c4d736c req-74a739a8-eb07-4719-b388-6479fafac096 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Updating instance_info_cache with network_info: [{"id": "b52c0c61-cdaa-4ec8-b935-3229b930c548", "address": "fa:16:3e:58:c2:30", "network": {"id": "eb301569-75fa-4c77-b33e-70b08067e071", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-256489772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a8f5f9386ba4dfa869c288a30aaeada", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb52c0c61-cd", "ovs_interfaceid": "b52c0c61-cdaa-4ec8-b935-3229b930c548", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1945.120331] env[62619]: DEBUG nova.compute.manager [req-84fbac0d-099e-438f-897d-8fadd05120cd req-ad2cfe8b-0ace-421b-914b-3eef591503c5 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Received event network-vif-deleted-d8de64b8-2687-42e0-91e6-97aa76f28d9f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1945.120777] env[62619]: INFO nova.compute.manager [req-84fbac0d-099e-438f-897d-8fadd05120cd req-ad2cfe8b-0ace-421b-914b-3eef591503c5 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Neutron deleted interface d8de64b8-2687-42e0-91e6-97aa76f28d9f; detaching it from the instance and deleting it from the info cache [ 1945.120777] env[62619]: DEBUG nova.network.neutron [req-84fbac0d-099e-438f-897d-8fadd05120cd req-ad2cfe8b-0ace-421b-914b-3eef591503c5 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1945.142655] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778618, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.593964} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.142954] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 314758ce-6522-47cf-8445-0d28b1b085b9/314758ce-6522-47cf-8445-0d28b1b085b9.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1945.143189] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1945.143449] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1f64ef1-81e9-4ec1-a207-5930f90c6a1b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.150260] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1945.150260] env[62619]: value = "task-1778623" [ 1945.150260] env[62619]: _type = "Task" [ 1945.150260] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.159838] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778623, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.282034] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778621, 'name': CreateVM_Task, 'duration_secs': 0.644416} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.282284] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1945.291672] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d6107f81-5dc7-4bd9-877a-caaf1e8d2265" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.291862] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d6107f81-5dc7-4bd9-877a-caaf1e8d2265" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.292276] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d6107f81-5dc7-4bd9-877a-caaf1e8d2265" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1945.292572] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97853ec5-a45b-431a-a061-e21365664e66 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.297465] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778622, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033639} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.298087] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1945.298329] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8f8142ab-6142-4a84-93a9-2b4bd7e31086/8f8142ab-6142-4a84-93a9-2b4bd7e31086.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1945.298535] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8f8142ab-6142-4a84-93a9-2b4bd7e31086/8f8142ab-6142-4a84-93a9-2b4bd7e31086.vmdk to [datastore1] a0953370-77f2-4e3b-a92e-cb12b3a82361/a0953370-77f2-4e3b-a92e-cb12b3a82361.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1945.298816] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e9867e9-4f1c-438b-9e31-439c5edbf142 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.301970] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1945.301970] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f7267b-243d-2cef-fb27-267af569a4eb" [ 1945.301970] env[62619]: _type = "Task" [ 1945.301970] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.307631] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1945.307631] env[62619]: value = "task-1778624" [ 1945.307631] env[62619]: _type = "Task" [ 1945.307631] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.311106] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52f7267b-243d-2cef-fb27-267af569a4eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.321415] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778624, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.531498] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1945.531724] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.233s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1945.537602] env[62619]: DEBUG oslo_concurrency.lockutils [req-78c4d07f-335f-4248-898c-48550c4d736c req-74a739a8-eb07-4719-b388-6479fafac096 service nova] Releasing lock "refresh_cache-13eeb4aa-0f20-4aed-9453-66afb0ff1152" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1945.600420] env[62619]: DEBUG nova.network.neutron [-] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1945.622697] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39f0dd09-74fb-4aad-9ce0-d2ae95168fb2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.632814] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9835eff0-384f-4c8f-925e-28d3a4c8c255 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.662972] env[62619]: DEBUG nova.compute.manager [req-84fbac0d-099e-438f-897d-8fadd05120cd req-ad2cfe8b-0ace-421b-914b-3eef591503c5 service nova] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Detach interface failed, port_id=d8de64b8-2687-42e0-91e6-97aa76f28d9f, reason: Instance 70265068-1185-4f23-b0b4-ed2378c17a89 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1945.668204] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778623, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.29721} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.668451] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1945.669231] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67d752e-a039-4374-821a-67e9954edbd5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.688823] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 314758ce-6522-47cf-8445-0d28b1b085b9/314758ce-6522-47cf-8445-0d28b1b085b9.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1945.689064] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb482e05-0655-496b-af71-d6f5b8d097e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.708751] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1945.708751] env[62619]: value = "task-1778625" [ 1945.708751] env[62619]: _type = "Task" [ 1945.708751] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.716515] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778625, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.816027] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d6107f81-5dc7-4bd9-877a-caaf1e8d2265" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1945.816217] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Processing image d6107f81-5dc7-4bd9-877a-caaf1e8d2265 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1945.818048] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d6107f81-5dc7-4bd9-877a-caaf1e8d2265/d6107f81-5dc7-4bd9-877a-caaf1e8d2265.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.818048] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d6107f81-5dc7-4bd9-877a-caaf1e8d2265/d6107f81-5dc7-4bd9-877a-caaf1e8d2265.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.818048] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1945.818048] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-278b4be7-09f7-4515-b9fd-e89b16c0380b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.825458] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778624, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.831984] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1945.832140] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1945.833054] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64f2e4a3-0399-479e-82d4-9e771111748b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.838299] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1945.838299] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d27b47-025e-7841-aad0-47d4db01590d" [ 1945.838299] env[62619]: _type = "Task" [ 1945.838299] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.845573] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d27b47-025e-7841-aad0-47d4db01590d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.103409] env[62619]: INFO nova.compute.manager [-] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Took 1.31 seconds to deallocate network for instance. [ 1946.219838] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778625, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.322933] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778624, 'name': CopyVirtualDisk_Task} progress is 24%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.348669] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Preparing fetch location {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1946.348856] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Fetch image to [datastore1] OSTACK_IMG_f752eaf2-cb59-4a22-abd8-aac01dfbc19c/OSTACK_IMG_f752eaf2-cb59-4a22-abd8-aac01dfbc19c.vmdk {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1946.348910] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Downloading stream optimized image d6107f81-5dc7-4bd9-877a-caaf1e8d2265 to [datastore1] OSTACK_IMG_f752eaf2-cb59-4a22-abd8-aac01dfbc19c/OSTACK_IMG_f752eaf2-cb59-4a22-abd8-aac01dfbc19c.vmdk on the data store datastore1 as vApp {{(pid=62619) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1946.349098] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Downloading image file data d6107f81-5dc7-4bd9-877a-caaf1e8d2265 to the ESX as VM named 'OSTACK_IMG_f752eaf2-cb59-4a22-abd8-aac01dfbc19c' {{(pid=62619) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1946.439216] env[62619]: DEBUG oslo_vmware.rw_handles [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1946.439216] env[62619]: value = "resgroup-9" [ 1946.439216] env[62619]: _type = "ResourcePool" [ 1946.439216] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1946.439698] env[62619]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-9e0a6f0d-8126-4d22-ac52-0297fb294344 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.462172] env[62619]: DEBUG oslo_vmware.rw_handles [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lease: (returnval){ [ 1946.462172] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5223feb0-0f15-f017-c786-9aed72ff5e11" [ 1946.462172] env[62619]: _type = "HttpNfcLease" [ 1946.462172] env[62619]: } obtained for vApp import into resource pool (val){ [ 1946.462172] env[62619]: value = "resgroup-9" [ 1946.462172] env[62619]: _type = "ResourcePool" [ 1946.462172] env[62619]: }. 
{{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1946.462561] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the lease: (returnval){ [ 1946.462561] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5223feb0-0f15-f017-c786-9aed72ff5e11" [ 1946.462561] env[62619]: _type = "HttpNfcLease" [ 1946.462561] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1946.469526] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1946.469526] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5223feb0-0f15-f017-c786-9aed72ff5e11" [ 1946.469526] env[62619]: _type = "HttpNfcLease" [ 1946.469526] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1946.531365] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1946.531647] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1946.531647] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1946.611423] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.611888] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.612204] env[62619]: DEBUG nova.objects.instance [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lazy-loading 'resources' on Instance uuid 70265068-1185-4f23-b0b4-ed2378c17a89 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1946.718828] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778625, 'name': ReconfigVM_Task, 'duration_secs': 0.948774} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.719118] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 314758ce-6522-47cf-8445-0d28b1b085b9/314758ce-6522-47cf-8445-0d28b1b085b9.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1946.719790] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95b2baa3-1994-4c5e-ba19-ac3a0dc235fc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.727566] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1946.727566] env[62619]: value = "task-1778627" [ 1946.727566] env[62619]: _type = "Task" [ 1946.727566] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.736702] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778627, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.826096] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778624, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.971993] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1946.971993] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5223feb0-0f15-f017-c786-9aed72ff5e11" [ 1946.971993] env[62619]: _type = "HttpNfcLease" [ 1946.971993] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1947.237597] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778627, 'name': Rename_Task, 'duration_secs': 0.496749} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.237857] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1947.240422] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83fc3c61-5fc2-459d-aa98-aa165b9d557c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.247671] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1947.247671] env[62619]: value = "task-1778628" [ 1947.247671] env[62619]: _type = "Task" [ 1947.247671] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.256562] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778628, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.258262] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c24b94-31c4-4a95-9e43-5dd923e71187 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.265275] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab51790f-20bc-403d-9236-91f0f1f47682 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.295925] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea1dceb-d144-4e6e-936b-ed3dcf48ed10 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.303816] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f64c19-857b-4515-8072-f221b406c798 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.318861] env[62619]: DEBUG nova.compute.provider_tree [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1947.328601] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 
tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778624, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.472421] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1947.472421] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5223feb0-0f15-f017-c786-9aed72ff5e11" [ 1947.472421] env[62619]: _type = "HttpNfcLease" [ 1947.472421] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1947.472421] env[62619]: DEBUG oslo_vmware.rw_handles [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1947.472421] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5223feb0-0f15-f017-c786-9aed72ff5e11" [ 1947.472421] env[62619]: _type = "HttpNfcLease" [ 1947.472421] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1947.473172] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5b068f-27f0-4323-b078-7ae89c140e7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.481288] env[62619]: DEBUG oslo_vmware.rw_handles [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52da2959-f8c9-bc56-7221-ec2a1a564b49/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1947.481463] env[62619]: DEBUG oslo_vmware.rw_handles [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Creating HTTP connection to write to file with size = 31662080 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52da2959-f8c9-bc56-7221-ec2a1a564b49/disk-0.vmdk. {{(pid=62619) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1947.545316] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cf930905-a254-4347-8060-ac392b64499c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.757736] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778628, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.830248] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778624, 'name': CopyVirtualDisk_Task} progress is 88%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.843043] env[62619]: ERROR nova.scheduler.client.report [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [req-85970090-0de3-4294-af73-8649d29a583b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID e814b747-ed75-487b-a97d-acf66bc6db0b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-85970090-0de3-4294-af73-8649d29a583b"}]} [ 1947.860783] env[62619]: DEBUG nova.scheduler.client.report [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1947.881236] env[62619]: DEBUG nova.scheduler.client.report [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1947.881478] env[62619]: DEBUG nova.compute.provider_tree [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1947.895034] env[62619]: DEBUG nova.scheduler.client.report [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1947.923078] env[62619]: DEBUG nova.scheduler.client.report [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 
tempest-ServerActionsTestOtherA-2061775145-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1948.071107] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072cb187-ef84-46c4-81db-ce1a615c4b44 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.083573] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b67d58-a879-40a5-9121-2f2efae61ab7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.120836] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e18364-59a1-4b94-9b07-a5719e1df01f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.130088] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611983d8-18a3-4cee-a520-072e245fcb3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.146026] env[62619]: DEBUG nova.compute.provider_tree [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1948.260718] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778628, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.330960] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778624, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.780488} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.331252] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8f8142ab-6142-4a84-93a9-2b4bd7e31086/8f8142ab-6142-4a84-93a9-2b4bd7e31086.vmdk to [datastore1] a0953370-77f2-4e3b-a92e-cb12b3a82361/a0953370-77f2-4e3b-a92e-cb12b3a82361.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1948.332057] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71ea749-98f0-41a9-8a87-12da40b55de6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.356706] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] a0953370-77f2-4e3b-a92e-cb12b3a82361/a0953370-77f2-4e3b-a92e-cb12b3a82361.vmdk or device None with type streamOptimized {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1948.360229] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c81d24f9-4a5f-44de-96d0-1fdaf87562ad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.384411] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1948.384411] env[62619]: value = "task-1778629" [ 1948.384411] env[62619]: _type = "Task" [ 1948.384411] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.394148] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778629, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.678487] env[62619]: DEBUG nova.scheduler.client.report [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 171 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1948.678840] env[62619]: DEBUG nova.compute.provider_tree [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 171 to 172 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1948.679134] env[62619]: DEBUG nova.compute.provider_tree [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1948.760677] env[62619]: DEBUG oslo_vmware.api [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778628, 'name': PowerOnVM_Task, 'duration_secs': 1.044021} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.760943] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1948.761180] env[62619]: INFO nova.compute.manager [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Took 8.90 seconds to spawn the instance on the hypervisor. 
[ 1948.761408] env[62619]: DEBUG nova.compute.manager [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1948.762201] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a916963-5adf-41ac-8877-be0e86edb26b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.895490] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778629, 'name': ReconfigVM_Task, 'duration_secs': 0.37938} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.895756] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Reconfigured VM instance instance-0000006a to attach disk [datastore1] a0953370-77f2-4e3b-a92e-cb12b3a82361/a0953370-77f2-4e3b-a92e-cb12b3a82361.vmdk or device None with type streamOptimized {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1948.896403] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08c820c9-6ce3-439d-9ffd-add133999ddc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.902778] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1948.902778] env[62619]: value = "task-1778630" [ 1948.902778] env[62619]: _type = "Task" [ 1948.902778] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.910442] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778630, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.102051] env[62619]: DEBUG oslo_vmware.rw_handles [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Completed reading data from the image iterator. {{(pid=62619) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1949.102387] env[62619]: DEBUG oslo_vmware.rw_handles [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52da2959-f8c9-bc56-7221-ec2a1a564b49/disk-0.vmdk. 
{{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1949.103269] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91635cb1-2056-464b-aab7-5353d2df80ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.110249] env[62619]: DEBUG oslo_vmware.rw_handles [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52da2959-f8c9-bc56-7221-ec2a1a564b49/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1949.110450] env[62619]: DEBUG oslo_vmware.rw_handles [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52da2959-f8c9-bc56-7221-ec2a1a564b49/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1949.110682] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-9764f48f-918d-44f9-9929-3105872b5b26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.186792] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.575s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.206125] env[62619]: INFO nova.scheduler.client.report [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleted allocations for instance 70265068-1185-4f23-b0b4-ed2378c17a89 [ 1949.280063] env[62619]: INFO nova.compute.manager [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Took 14.78 seconds to build instance. [ 1949.414254] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778630, 'name': Rename_Task, 'duration_secs': 0.202705} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.414571] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1949.414826] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de633542-503f-4170-a813-f84095ec93ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.420065] env[62619]: DEBUG oslo_vmware.rw_handles [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52da2959-f8c9-bc56-7221-ec2a1a564b49/disk-0.vmdk. {{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1949.420306] env[62619]: INFO nova.virt.vmwareapi.images [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Downloaded image file data d6107f81-5dc7-4bd9-877a-caaf1e8d2265 [ 1949.421986] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-333654b8-dffe-4dea-82e2-559707eea6fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.424452] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1949.424452] env[62619]: value = "task-1778631" [ 1949.424452] env[62619]: _type = "Task" [ 1949.424452] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.439655] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-adbd5bb3-b7b9-4f83-8920-ce6f2d4f4565 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.444718] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778631, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.471628] env[62619]: INFO nova.virt.vmwareapi.images [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] The imported VM was unregistered [ 1949.474019] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Caching image {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1949.474263] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Creating directory with path [datastore1] devstack-image-cache_base/d6107f81-5dc7-4bd9-877a-caaf1e8d2265 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1949.474550] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2514689-6dfe-4f0d-9c02-546f82682468 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.498868] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Created directory with path [datastore1] devstack-image-cache_base/d6107f81-5dc7-4bd9-877a-caaf1e8d2265 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1949.499123] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_f752eaf2-cb59-4a22-abd8-aac01dfbc19c/OSTACK_IMG_f752eaf2-cb59-4a22-abd8-aac01dfbc19c.vmdk to [datastore1] devstack-image-cache_base/d6107f81-5dc7-4bd9-877a-caaf1e8d2265/d6107f81-5dc7-4bd9-877a-caaf1e8d2265.vmdk. {{(pid=62619) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1949.499414] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-9bd181bb-424a-48df-aeb7-fe94bdad0d50 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.506655] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1949.506655] env[62619]: value = "task-1778633" [ 1949.506655] env[62619]: _type = "Task" [ 1949.506655] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.516070] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778633, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.552894] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 1949.553161] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1949.553341] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1949.553503] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1949.620048] env[62619]: INFO nova.compute.manager [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Rebuilding instance [ 1949.655150] env[62619]: DEBUG nova.compute.manager [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1949.656050] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de8c10d-4261-49f3-912f-58c753df5801 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.713529] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b010af46-fa1d-4cfd-a719-d2e9c27b4197 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "70265068-1185-4f23-b0b4-ed2378c17a89" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.139s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.783252] env[62619]: DEBUG oslo_concurrency.lockutils [None req-31f7fcc9-9da2-4c22-99cc-a61b760283db tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Lock "314758ce-6522-47cf-8445-0d28b1b085b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.313s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.935215] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778631, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.016513] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778633, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.441167] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778631, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.518034] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778633, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.671168] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1950.671573] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69a82a9e-17fa-406f-93d3-792e5f3c8401 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.684019] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1950.684019] env[62619]: value = "task-1778635" [ 1950.684019] env[62619]: _type = "Task" [ 1950.684019] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.696741] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778635, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.941852] env[62619]: DEBUG oslo_vmware.api [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778631, 'name': PowerOnVM_Task, 'duration_secs': 1.498519} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.942268] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1951.022297] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778633, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.056344] env[62619]: DEBUG nova.compute.manager [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1951.057379] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545c73df-ced6-4a35-b2af-e8417ad4e402 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.199377] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778635, 'name': PowerOffVM_Task, 'duration_secs': 0.242135} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.199711] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1951.199933] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1951.200737] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a0a04f-d245-4f1f-9f9b-7eccdfe90eb7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.211712] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1951.211987] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f150c7c-f7ce-490e-991e-5ebafd37e668 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.240301] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-41deaabc-943b-49f0-bc01-653e4982efa7 
tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1951.240541] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1951.240742] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Deleting the datastore file [datastore1] 314758ce-6522-47cf-8445-0d28b1b085b9 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1951.241014] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-682de832-03bf-4b94-95da-b6c01c7ecaeb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.252236] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1951.252236] env[62619]: value = "task-1778637" [ 1951.252236] env[62619]: _type = "Task" [ 1951.252236] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.265672] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778637, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.528079] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778633, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.581968] env[62619]: DEBUG oslo_concurrency.lockutils [None req-139c3ebb-1237-4974-9a72-fc9c64d40b5d tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 23.376s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.765861] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778637, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.022389] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778633, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.265374] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778637, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.520568] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778633, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.671555] env[62619]: DEBUG oslo_concurrency.lockutils [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "a0953370-77f2-4e3b-a92e-cb12b3a82361" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.671878] env[62619]: DEBUG oslo_concurrency.lockutils [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.672083] env[62619]: DEBUG oslo_concurrency.lockutils [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "a0953370-77f2-4e3b-a92e-cb12b3a82361-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.672271] env[62619]: DEBUG oslo_concurrency.lockutils [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.672462] env[62619]: DEBUG oslo_concurrency.lockutils [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.674636] 
env[62619]: INFO nova.compute.manager [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Terminating instance [ 1952.763279] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778637, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.138558} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.763584] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1952.763882] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1952.764061] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1953.021510] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778633, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.292278} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.021860] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_f752eaf2-cb59-4a22-abd8-aac01dfbc19c/OSTACK_IMG_f752eaf2-cb59-4a22-abd8-aac01dfbc19c.vmdk to [datastore1] devstack-image-cache_base/d6107f81-5dc7-4bd9-877a-caaf1e8d2265/d6107f81-5dc7-4bd9-877a-caaf1e8d2265.vmdk. 
[ 1953.022696] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Cleaning up location [datastore1] OSTACK_IMG_f752eaf2-cb59-4a22-abd8-aac01dfbc19c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1953.022696] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_f752eaf2-cb59-4a22-abd8-aac01dfbc19c {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1953.022696] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0cb19f04-7309-43e1-bebb-c6f0d08153c2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.028413] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1953.028413] env[62619]: value = "task-1778638" [ 1953.028413] env[62619]: _type = "Task" [ 1953.028413] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.035615] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778638, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.178818] env[62619]: DEBUG nova.compute.manager [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1953.179051] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1953.180314] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1a8d0d-aaf3-4e67-8c6b-810a9afe207a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.188080] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1953.188329] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-205b15bf-21a0-4a8f-aff7-a7b734c91616 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.194274] env[62619]: DEBUG oslo_vmware.api [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1953.194274] env[62619]: value = "task-1778639" [ 1953.194274] env[62619]: _type = "Task" [ 1953.194274] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.202198] env[62619]: DEBUG oslo_vmware.api [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778639, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.437706] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.437975] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.438199] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "006c9f0b-4b53-4740-9f67-ec9b19b8bcb2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.438386] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "006c9f0b-4b53-4740-9f67-ec9b19b8bcb2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.438552] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "006c9f0b-4b53-4740-9f67-ec9b19b8bcb2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.440748] env[62619]: INFO nova.compute.manager [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Terminating instance [ 1953.537925] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778638, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206621} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.538267] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1953.538440] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d6107f81-5dc7-4bd9-877a-caaf1e8d2265/d6107f81-5dc7-4bd9-877a-caaf1e8d2265.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1953.538677] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d6107f81-5dc7-4bd9-877a-caaf1e8d2265/d6107f81-5dc7-4bd9-877a-caaf1e8d2265.vmdk to [datastore1] 13eeb4aa-0f20-4aed-9453-66afb0ff1152/13eeb4aa-0f20-4aed-9453-66afb0ff1152.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1953.538918] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d9a81b3-a072-44b1-847f-af40c667f2e2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.545392] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1953.545392] env[62619]: value = "task-1778640" [ 1953.545392] env[62619]: _type = "Task" [ 1953.545392] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.552795] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778640, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.706121] env[62619]: DEBUG oslo_vmware.api [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778639, 'name': PowerOffVM_Task, 'duration_secs': 0.208821} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.706469] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1953.706645] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1953.706873] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b7aad1e-d6a3-4cdc-80c7-619d15206e05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.804664] env[62619]: DEBUG nova.virt.hardware [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1953.804981] env[62619]: DEBUG nova.virt.hardware [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1953.805301] env[62619]: DEBUG nova.virt.hardware [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1953.805603] env[62619]: DEBUG nova.virt.hardware [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1953.805936] env[62619]: DEBUG nova.virt.hardware [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1953.806206] env[62619]: DEBUG nova.virt.hardware [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1953.806552] env[62619]: DEBUG nova.virt.hardware [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1953.806822] env[62619]: DEBUG nova.virt.hardware [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1953.807114] env[62619]: DEBUG nova.virt.hardware [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1953.807409] env[62619]: DEBUG nova.virt.hardware [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1953.807640] env[62619]: DEBUG nova.virt.hardware [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1953.808795] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a367f4-f5af-4ace-bf53-44478ad846c0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.813424] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1953.813655] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1953.813843] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Deleting the datastore file [datastore1] a0953370-77f2-4e3b-a92e-cb12b3a82361 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1953.814493] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea170cf3-035a-4ba2-85fd-3cfff07bd6ad {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.820278] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837a80cf-2cb9-4352-820e-f5dec629e65b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.825989] env[62619]: DEBUG oslo_vmware.api [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for the task: (returnval){ [ 1953.825989] env[62619]: value = "task-1778642" [ 1953.825989] env[62619]: _type = "Task" [ 1953.825989] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.835054] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1953.841570] env[62619]: DEBUG oslo.service.loopingcall [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1953.845213] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1953.845506] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-140559fd-3d18-42be-8a05-4b4182985447 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.864366] env[62619]: DEBUG oslo_vmware.api [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778642, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.865766] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1953.865766] env[62619]: value = "task-1778643" [ 1953.865766] env[62619]: _type = "Task" [ 1953.865766] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.875429] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778643, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.944553] env[62619]: DEBUG nova.compute.manager [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1953.944738] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1953.945637] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010fc3f6-ebd6-4eb3-8718-4fcb847acf79 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.955523] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1953.955804] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d54ec41a-1cc1-49ef-be49-bb55bf824b1c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.963796] env[62619]: DEBUG oslo_vmware.api [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1953.963796] env[62619]: value = "task-1778644" [ 1953.963796] env[62619]: _type = "Task" [ 1953.963796] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.976459] env[62619]: DEBUG oslo_vmware.api [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778644, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.056768] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778640, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.347895] env[62619]: DEBUG oslo_vmware.api [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778642, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.378680] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778643, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.477688] env[62619]: DEBUG oslo_vmware.api [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778644, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.558955] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778640, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.848706] env[62619]: DEBUG oslo_vmware.api [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778642, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.878419] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778643, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.978343] env[62619]: DEBUG oslo_vmware.api [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778644, 'name': PowerOffVM_Task, 'duration_secs': 0.950734} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.978659] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1954.978832] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1954.979168] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2180c00-7968-435e-9728-15eb62e76c08 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.056432] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778640, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.346384] env[62619]: DEBUG oslo_vmware.api [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778642, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.377144] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778643, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.556374] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778640, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.610129] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1955.610332] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1955.610490] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleting the datastore file [datastore1] 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1955.610908] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e7a8bec-12b2-46b3-948e-563cd5574d6a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.620104] env[62619]: DEBUG oslo_vmware.api [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for the task: (returnval){ [ 1955.620104] env[62619]: value = "task-1778646" [ 1955.620104] env[62619]: _type = "Task" [ 1955.620104] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.631252] env[62619]: DEBUG oslo_vmware.api [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778646, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.647143] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "interface-a2064f8f-b928-44c0-86d8-c0bb9882dbde-80b186c1-7ddd-465a-9b4d-431a14224046" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.647573] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-a2064f8f-b928-44c0-86d8-c0bb9882dbde-80b186c1-7ddd-465a-9b4d-431a14224046" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.647978] env[62619]: DEBUG nova.objects.instance [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'flavor' on Instance uuid a2064f8f-b928-44c0-86d8-c0bb9882dbde {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1955.846956] env[62619]: DEBUG oslo_vmware.api [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Task: {'id': task-1778642, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.677231} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.847211] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1955.847409] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1955.847625] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1955.847826] env[62619]: INFO nova.compute.manager [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Took 2.67 seconds to destroy the instance on the hypervisor. 
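The teardown traced above for instance a0953370-77f2-4e3b-a92e-cb12b3a82361 (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, each polled through wait_for_task until "completed successfully") is the VMware driver's destroy path. As a rough illustration only, the sketch below drives the same call pattern through the public oslo.vmware session API; the endpoint, credentials, vm_ref, ds_path and dc_ref are hypothetical placeholders, and destroy_vm/connect are illustrative helpers, not nova code.

# Minimal sketch, assuming only the public oslo.vmware API; values are placeholders.
from oslo_vmware import api


def connect():
    # Placeholder vCenter endpoint and credentials; constructing the session
    # performs the SessionManager.Login handshake seen at service startup.
    return api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                api_retry_count=10, task_poll_interval=0.5)


def destroy_vm(session, vm_ref, ds_path, dc_ref):
    # PowerOffVM_Task returns a task moref; wait_for_task() polls it until it
    # succeeds (the "progress is N%" / "completed successfully" lines in the
    # log come from this polling loop).
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is a plain method, not a vCenter task, so nothing is polled.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Removing the instance directory is again a task, issued against the
    # FileManager from the retrieved service content.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)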
[ 1955.848093] env[62619]: DEBUG oslo.service.loopingcall [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1955.848306] env[62619]: DEBUG nova.compute.manager [-] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1955.848402] env[62619]: DEBUG nova.network.neutron [-] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1955.877622] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778643, 'name': CreateVM_Task, 'duration_secs': 1.899734} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.879843] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1955.880322] env[62619]: DEBUG oslo_concurrency.lockutils [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1955.880498] env[62619]: DEBUG oslo_concurrency.lockutils [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1955.880877] env[62619]: DEBUG oslo_concurrency.lockutils [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1955.881473] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0b9e908-017b-45b5-8d50-83ef5ef6b806 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.886127] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1955.886127] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52aabd8d-ad26-0c5c-e7b8-5c7c7f7a9cb7" [ 1955.886127] env[62619]: _type = "Task" [ 1955.886127] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.894478] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52aabd8d-ad26-0c5c-e7b8-5c7c7f7a9cb7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.056435] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778640, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.228385} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.056705] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d6107f81-5dc7-4bd9-877a-caaf1e8d2265/d6107f81-5dc7-4bd9-877a-caaf1e8d2265.vmdk to [datastore1] 13eeb4aa-0f20-4aed-9453-66afb0ff1152/13eeb4aa-0f20-4aed-9453-66afb0ff1152.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1956.057506] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517476f3-3083-4dd6-a8f8-ff2a1ab1a479 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.080837] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 13eeb4aa-0f20-4aed-9453-66afb0ff1152/13eeb4aa-0f20-4aed-9453-66afb0ff1152.vmdk or device None with type streamOptimized {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1956.081166] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c3dd670-eb80-494e-847d-fb5b7e18d050 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.100880] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1956.100880] env[62619]: value = "task-1778647" [ 1956.100880] env[62619]: _type = "Task" [ 1956.100880] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.109019] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778647, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.130136] env[62619]: DEBUG oslo_vmware.api [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Task: {'id': task-1778646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281488} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.130407] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1956.130641] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1956.130862] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1956.131090] env[62619]: INFO nova.compute.manager [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Took 2.19 seconds to destroy the instance on the hypervisor. [ 1956.131376] env[62619]: DEBUG oslo.service.loopingcall [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1956.131628] env[62619]: DEBUG nova.compute.manager [-] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1956.131732] env[62619]: DEBUG nova.network.neutron [-] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1956.259644] env[62619]: DEBUG nova.objects.instance [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'pci_requests' on Instance uuid a2064f8f-b928-44c0-86d8-c0bb9882dbde {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1956.397312] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52aabd8d-ad26-0c5c-e7b8-5c7c7f7a9cb7, 'name': SearchDatastore_Task, 'duration_secs': 0.01346} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.397597] env[62619]: DEBUG oslo_concurrency.lockutils [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1956.397827] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1956.398063] env[62619]: DEBUG oslo_concurrency.lockutils [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1956.398238] env[62619]: DEBUG oslo_concurrency.lockutils [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1956.398384] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1956.398647] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f202486-b054-473a-95a8-5eff49e9aa50 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.412180] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1956.412180] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1956.413723] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-740a4683-c7ae-4ef3-9ecd-b8a91d3ae482 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.418103] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1956.418103] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524706a6-06fc-481f-b8a8-3507aea364f6" [ 1956.418103] env[62619]: _type = "Task" [ 1956.418103] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.428783] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524706a6-06fc-481f-b8a8-3507aea364f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.493109] env[62619]: DEBUG nova.compute.manager [req-7ab87874-e5b2-4db7-858d-48758e2b6206 req-6d1c67db-cc16-42e0-a442-2118b68a149d service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Received event network-vif-deleted-b78d1b6e-104b-4041-bcc5-5802f9f6fe3c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1956.493329] env[62619]: INFO nova.compute.manager [req-7ab87874-e5b2-4db7-858d-48758e2b6206 req-6d1c67db-cc16-42e0-a442-2118b68a149d service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Neutron deleted interface b78d1b6e-104b-4041-bcc5-5802f9f6fe3c; detaching it from the instance and deleting it from the info cache [ 1956.493501] env[62619]: DEBUG nova.network.neutron [req-7ab87874-e5b2-4db7-858d-48758e2b6206 req-6d1c67db-cc16-42e0-a442-2118b68a149d service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1956.611060] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778647, 'name': ReconfigVM_Task, 'duration_secs': 0.280185} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.611334] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 13eeb4aa-0f20-4aed-9453-66afb0ff1152/13eeb4aa-0f20-4aed-9453-66afb0ff1152.vmdk or device None with type streamOptimized {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1956.611942] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e757051b-44d9-4b19-8af5-cdd0ab1a175b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.619334] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1956.619334] env[62619]: value = "task-1778648" [ 1956.619334] env[62619]: _type = "Task" [ 1956.619334] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.626995] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778648, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.763365] env[62619]: DEBUG nova.objects.base [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1956.763582] env[62619]: DEBUG nova.network.neutron [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1956.827366] env[62619]: DEBUG nova.policy [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8d937f303584c3daea133a6283fd5a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23d77e73a09d492695fbfe6ac2c93371', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1956.928236] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524706a6-06fc-481f-b8a8-3507aea364f6, 'name': SearchDatastore_Task, 'duration_secs': 0.013456} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.929100] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33eee8bb-e2cc-4253-aa35-ad210fa4e7c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.934265] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1956.934265] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52eed3ce-068f-8f57-3bbf-dbdbdd98e5cc" [ 1956.934265] env[62619]: _type = "Task" [ 1956.934265] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.941916] env[62619]: DEBUG nova.network.neutron [-] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1956.943129] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52eed3ce-068f-8f57-3bbf-dbdbdd98e5cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.995711] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5f090e31-4b4a-44f0-bdcc-0ebf42ca419f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.005196] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58f3810-bdd3-4bde-b208-bfe2cf6b4246 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.034857] env[62619]: DEBUG nova.compute.manager [req-7ab87874-e5b2-4db7-858d-48758e2b6206 req-6d1c67db-cc16-42e0-a442-2118b68a149d service nova] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Detach interface failed, port_id=b78d1b6e-104b-4041-bcc5-5802f9f6fe3c, reason: Instance a0953370-77f2-4e3b-a92e-cb12b3a82361 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1957.059384] env[62619]: DEBUG nova.network.neutron [-] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1957.131033] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778648, 'name': Rename_Task, 'duration_secs': 0.203846} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.131033] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1957.131033] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-422344a9-e40a-4e05-9325-5829ec3ef296 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.138827] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1957.138827] env[62619]: value = "task-1778649" [ 1957.138827] env[62619]: _type = "Task" [ 1957.138827] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.147437] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778649, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.446052] env[62619]: INFO nova.compute.manager [-] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Took 1.60 seconds to deallocate network for instance. [ 1957.446052] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52eed3ce-068f-8f57-3bbf-dbdbdd98e5cc, 'name': SearchDatastore_Task, 'duration_secs': 0.015629} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.447182] env[62619]: DEBUG oslo_concurrency.lockutils [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1957.447431] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 314758ce-6522-47cf-8445-0d28b1b085b9/314758ce-6522-47cf-8445-0d28b1b085b9.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1957.450115] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-480f5858-0e50-4d13-bdc8-76f6188ab36a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.456421] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1957.456421] env[62619]: value = "task-1778650" [ 1957.456421] env[62619]: _type = "Task" [ 1957.456421] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.465346] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778650, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.561087] env[62619]: INFO nova.compute.manager [-] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Took 1.43 seconds to deallocate network for instance. [ 1957.651218] env[62619]: DEBUG oslo_vmware.api [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778649, 'name': PowerOnVM_Task, 'duration_secs': 0.472516} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.651536] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1957.766895] env[62619]: DEBUG nova.compute.manager [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1957.767896] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278c80b1-6bc8-43b0-aef2-fe4cb695c24f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.953639] env[62619]: DEBUG oslo_concurrency.lockutils [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.953984] env[62619]: DEBUG oslo_concurrency.lockutils [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.954082] env[62619]: DEBUG nova.objects.instance [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lazy-loading 'resources' on Instance uuid a0953370-77f2-4e3b-a92e-cb12b3a82361 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1957.968763] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778650, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.417702} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.969025] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 314758ce-6522-47cf-8445-0d28b1b085b9/314758ce-6522-47cf-8445-0d28b1b085b9.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1957.969269] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1957.969495] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0cb67c0a-2d76-4578-9665-9cd8a1130dfd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.976207] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1957.976207] env[62619]: value = "task-1778651" [ 1957.976207] env[62619]: _type = "Task" [ 1957.976207] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.983915] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778651, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.068064] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.287282] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2cd8db39-6efa-4178-a74c-4394d2cd95ca tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 22.220s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.486226] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778651, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058342} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.486341] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1958.487029] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba681541-ecd5-49e5-8dcd-9247fe351109 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.509673] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 314758ce-6522-47cf-8445-0d28b1b085b9/314758ce-6522-47cf-8445-0d28b1b085b9.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1958.512247] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-258ab91f-8720-492b-a33e-8660c0b78d5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.533972] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1958.533972] env[62619]: value = "task-1778652" [ 1958.533972] env[62619]: _type = "Task" [ 1958.533972] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.545209] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778652, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.568326] env[62619]: DEBUG nova.compute.manager [req-327d2599-a441-4b8d-b2ba-087e677ba458 req-c4191baa-3660-4e80-8032-58c2ee033e01 service nova] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Received event network-vif-deleted-28e9da04-af12-4a21-b4ee-408c492669ef {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1958.568543] env[62619]: DEBUG nova.compute.manager [req-327d2599-a441-4b8d-b2ba-087e677ba458 req-c4191baa-3660-4e80-8032-58c2ee033e01 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Received event network-vif-plugged-80b186c1-7ddd-465a-9b4d-431a14224046 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1958.568733] env[62619]: DEBUG oslo_concurrency.lockutils [req-327d2599-a441-4b8d-b2ba-087e677ba458 req-c4191baa-3660-4e80-8032-58c2ee033e01 service nova] Acquiring lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.568973] env[62619]: DEBUG oslo_concurrency.lockutils [req-327d2599-a441-4b8d-b2ba-087e677ba458 req-c4191baa-3660-4e80-8032-58c2ee033e01 service nova] Lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1958.569099] env[62619]: DEBUG oslo_concurrency.lockutils [req-327d2599-a441-4b8d-b2ba-087e677ba458 req-c4191baa-3660-4e80-8032-58c2ee033e01 service nova] Lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.569263] env[62619]: DEBUG nova.compute.manager [req-327d2599-a441-4b8d-b2ba-087e677ba458 req-c4191baa-3660-4e80-8032-58c2ee033e01 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] No waiting events found dispatching network-vif-plugged-80b186c1-7ddd-465a-9b4d-431a14224046 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1958.569411] env[62619]: WARNING nova.compute.manager [req-327d2599-a441-4b8d-b2ba-087e677ba458 req-c4191baa-3660-4e80-8032-58c2ee033e01 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Received unexpected event network-vif-plugged-80b186c1-7ddd-465a-9b4d-431a14224046 for instance with vm_state active and task_state None. 
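[editor's note] The task-1778651 through task-1778653 records above and below follow oslo.vmware's invoke-then-poll pattern: the driver calls a vSphere *_Task method through the API session, then wait_for_task() polls it until it completes, which is what produces the repeated "Task: {...} progress is N%" and "... completed successfully" lines. A minimal sketch of that pattern, assuming an already-authenticated oslo_vmware.api.VMwareAPISession bound to a variable `session` and a VM managed-object reference in `vm_ref` (both placeholder names for illustration, not values from this log):

    # Sketch only: `session` is assumed to be an oslo_vmware.api.VMwareAPISession
    # and `vm_ref` a vim ManagedObjectReference for the VM.
    def power_on_and_wait(session, vm_ref):
        # Start the asynchronous vSphere task; this corresponds to the
        # "Invoking VirtualMachine.PowerOnVM_Task ..." records in the log.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task until it reaches a terminal state,
        # emitting the "progress is N%" / "completed successfully" debug lines,
        # and returns the final task info (or raises on a task fault).
        return session.wait_for_task(task)

The same shape applies to the CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task and PowerOnVM_Task entries in this section; only the invoked method and its arguments differ.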
[ 1958.578463] env[62619]: DEBUG nova.network.neutron [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Successfully updated port: 80b186c1-7ddd-465a-9b4d-431a14224046 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1958.608741] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d1aa9d-ec6d-44df-92e6-03b6405eb234 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.616958] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274648e9-e084-4227-bbb4-3322fb27a50a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.646784] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82229cb1-4fa3-4809-9077-8fb50f6c2100 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.654076] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725ebb74-b79c-4661-b2d4-5fef4bd8659e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.668438] env[62619]: DEBUG nova.compute.provider_tree [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1959.044461] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778652, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.081279] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1959.081485] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1959.081670] env[62619]: DEBUG nova.network.neutron [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1959.200173] env[62619]: DEBUG nova.scheduler.client.report [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Updated inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b with generation 172 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1959.200416] env[62619]: DEBUG nova.compute.provider_tree [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Updating resource provider e814b747-ed75-487b-a97d-acf66bc6db0b generation from 172 to 173 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1959.200619] env[62619]: DEBUG nova.compute.provider_tree [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1959.544671] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778652, 'name': ReconfigVM_Task, 'duration_secs': 0.5325} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.544943] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 314758ce-6522-47cf-8445-0d28b1b085b9/314758ce-6522-47cf-8445-0d28b1b085b9.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1959.545553] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c6abede1-b8d2-402f-952e-a1fe406f0688 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.551196] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1959.551196] env[62619]: value = "task-1778653" [ 1959.551196] env[62619]: _type = "Task" [ 1959.551196] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.558773] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778653, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.618262] env[62619]: WARNING nova.network.neutron [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] ed952a81-cb24-4b52-a137-9ceeefb896cf already exists in list: networks containing: ['ed952a81-cb24-4b52-a137-9ceeefb896cf']. 
ignoring it [ 1959.705719] env[62619]: DEBUG oslo_concurrency.lockutils [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.752s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.708934] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.640s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.709187] env[62619]: DEBUG nova.objects.instance [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lazy-loading 'resources' on Instance uuid 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1959.731223] env[62619]: INFO nova.scheduler.client.report [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Deleted allocations for instance a0953370-77f2-4e3b-a92e-cb12b3a82361 [ 1959.945064] env[62619]: DEBUG nova.network.neutron [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updating instance_info_cache with network_info: [{"id": "35375282-c697-45eb-a87c-d85555a9012b", "address": "fa:16:3e:87:9d:97", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35375282-c6", "ovs_interfaceid": "35375282-c697-45eb-a87c-d85555a9012b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "80b186c1-7ddd-465a-9b4d-431a14224046", "address": "fa:16:3e:d4:a3:f5", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80b186c1-7d", "ovs_interfaceid": "80b186c1-7ddd-465a-9b4d-431a14224046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.060980] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778653, 'name': Rename_Task, 'duration_secs': 0.127578} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.061367] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1960.061587] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7e5f4b2f-010c-4f7c-806b-3d8e5a2d6cea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.067577] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1960.067577] env[62619]: value = "task-1778655" [ 1960.067577] env[62619]: _type = "Task" [ 1960.067577] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.075132] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778655, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.240778] env[62619]: DEBUG oslo_concurrency.lockutils [None req-761800f5-2963-4287-8441-63aac5ac9eeb tempest-ServerActionsTestOtherB-723624610 tempest-ServerActionsTestOtherB-723624610-project-member] Lock "a0953370-77f2-4e3b-a92e-cb12b3a82361" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.569s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.320261] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a928833-e2b3-468b-82b1-7792c932bd5d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.329017] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb6f413-d63f-4c59-9633-ae0599ef1483 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.361650] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6038dd-a2ef-4315-ba1f-460074b41bb3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.369536] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b40321e-4fa9-416a-b691-a35c54e9f948 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.384383] env[62619]: DEBUG nova.compute.provider_tree [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1960.448071] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1960.448550] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.448713] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.449635] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc96a0a-bb58-45dd-898c-ec8a55bc9071 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.467137] env[62619]: DEBUG nova.virt.hardware [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 
tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1960.467385] env[62619]: DEBUG nova.virt.hardware [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1960.467569] env[62619]: DEBUG nova.virt.hardware [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1960.467754] env[62619]: DEBUG nova.virt.hardware [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1960.467899] env[62619]: DEBUG nova.virt.hardware [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1960.468054] env[62619]: DEBUG nova.virt.hardware [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1960.468260] env[62619]: DEBUG nova.virt.hardware [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1960.468415] env[62619]: DEBUG nova.virt.hardware [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1960.468573] env[62619]: DEBUG nova.virt.hardware [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1960.468734] env[62619]: DEBUG nova.virt.hardware [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 
tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1960.468905] env[62619]: DEBUG nova.virt.hardware [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1960.475096] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Reconfiguring VM to attach interface {{(pid=62619) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1960.475422] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f72c95f-a0b0-4e6f-a581-1acbdcf79511 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.493078] env[62619]: DEBUG oslo_vmware.api [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1960.493078] env[62619]: value = "task-1778656" [ 1960.493078] env[62619]: _type = "Task" [ 1960.493078] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.501258] env[62619]: DEBUG oslo_vmware.api [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778656, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.577556] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778655, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.601450] env[62619]: DEBUG nova.compute.manager [req-52b1c12a-01e5-455f-bba3-d8460b0ffc25 req-0ac73c23-ab1f-47d0-a6e9-8ae0db3a6a78 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Received event network-changed-80b186c1-7ddd-465a-9b4d-431a14224046 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1960.601650] env[62619]: DEBUG nova.compute.manager [req-52b1c12a-01e5-455f-bba3-d8460b0ffc25 req-0ac73c23-ab1f-47d0-a6e9-8ae0db3a6a78 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Refreshing instance network info cache due to event network-changed-80b186c1-7ddd-465a-9b4d-431a14224046. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1960.601858] env[62619]: DEBUG oslo_concurrency.lockutils [req-52b1c12a-01e5-455f-bba3-d8460b0ffc25 req-0ac73c23-ab1f-47d0-a6e9-8ae0db3a6a78 service nova] Acquiring lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.601999] env[62619]: DEBUG oslo_concurrency.lockutils [req-52b1c12a-01e5-455f-bba3-d8460b0ffc25 req-0ac73c23-ab1f-47d0-a6e9-8ae0db3a6a78 service nova] Acquired lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.602170] env[62619]: DEBUG nova.network.neutron [req-52b1c12a-01e5-455f-bba3-d8460b0ffc25 req-0ac73c23-ab1f-47d0-a6e9-8ae0db3a6a78 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Refreshing network info cache for port 80b186c1-7ddd-465a-9b4d-431a14224046 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1960.887598] env[62619]: DEBUG nova.scheduler.client.report [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1961.005222] env[62619]: DEBUG oslo_vmware.api [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778656, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.078081] env[62619]: DEBUG oslo_vmware.api [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778655, 'name': PowerOnVM_Task, 'duration_secs': 0.546946} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.078460] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1961.079058] env[62619]: DEBUG nova.compute.manager [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1961.079288] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b0dae8-5187-4c41-8afc-dc0cc965c1d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.316234] env[62619]: DEBUG nova.network.neutron [req-52b1c12a-01e5-455f-bba3-d8460b0ffc25 req-0ac73c23-ab1f-47d0-a6e9-8ae0db3a6a78 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updated VIF entry in instance network info cache for port 80b186c1-7ddd-465a-9b4d-431a14224046. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1961.316670] env[62619]: DEBUG nova.network.neutron [req-52b1c12a-01e5-455f-bba3-d8460b0ffc25 req-0ac73c23-ab1f-47d0-a6e9-8ae0db3a6a78 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updating instance_info_cache with network_info: [{"id": "35375282-c697-45eb-a87c-d85555a9012b", "address": "fa:16:3e:87:9d:97", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35375282-c6", "ovs_interfaceid": "35375282-c697-45eb-a87c-d85555a9012b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "80b186c1-7ddd-465a-9b4d-431a14224046", "address": "fa:16:3e:d4:a3:f5", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80b186c1-7d", "ovs_interfaceid": "80b186c1-7ddd-465a-9b4d-431a14224046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1961.392455] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.684s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1961.412024] env[62619]: INFO nova.scheduler.client.report [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Deleted allocations for instance 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2 [ 1961.505037] env[62619]: DEBUG oslo_vmware.api [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778656, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.595380] env[62619]: DEBUG oslo_concurrency.lockutils [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1961.595622] env[62619]: DEBUG oslo_concurrency.lockutils [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1961.595801] env[62619]: DEBUG nova.objects.instance [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1961.819305] env[62619]: DEBUG oslo_concurrency.lockutils [req-52b1c12a-01e5-455f-bba3-d8460b0ffc25 req-0ac73c23-ab1f-47d0-a6e9-8ae0db3a6a78 service nova] Releasing lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1961.918145] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f094742b-1c55-4100-8818-543a4c761922 tempest-ServerActionsTestOtherA-2061775145 tempest-ServerActionsTestOtherA-2061775145-project-member] Lock 
"006c9f0b-4b53-4740-9f67-ec9b19b8bcb2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.480s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.007029] env[62619]: DEBUG oslo_vmware.api [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778656, 'name': ReconfigVM_Task, 'duration_secs': 1.244023} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.007029] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1962.007029] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Reconfigured VM to attach interface {{(pid=62619) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1962.512130] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff7b6d7c-41de-4ceb-aef3-9f5f04b13c54 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-a2064f8f-b928-44c0-86d8-c0bb9882dbde-80b186c1-7ddd-465a-9b4d-431a14224046" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.864s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.604958] env[62619]: DEBUG oslo_concurrency.lockutils [None req-41deaabc-943b-49f0-bc01-653e4982efa7 tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.669664] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquiring lock "314758ce-6522-47cf-8445-0d28b1b085b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.669664] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Lock "314758ce-6522-47cf-8445-0d28b1b085b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.669867] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquiring lock "314758ce-6522-47cf-8445-0d28b1b085b9-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.670019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Lock "314758ce-6522-47cf-8445-0d28b1b085b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.671020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Lock "314758ce-6522-47cf-8445-0d28b1b085b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.672389] env[62619]: INFO nova.compute.manager [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Terminating instance [ 1963.176280] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquiring lock "refresh_cache-314758ce-6522-47cf-8445-0d28b1b085b9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1963.176491] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquired lock "refresh_cache-314758ce-6522-47cf-8445-0d28b1b085b9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1963.176672] env[62619]: DEBUG nova.network.neutron [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1963.720796] env[62619]: DEBUG nova.network.neutron [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1963.772350] env[62619]: DEBUG nova.network.neutron [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1963.868170] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "interface-a2064f8f-b928-44c0-86d8-c0bb9882dbde-80b186c1-7ddd-465a-9b4d-431a14224046" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.868370] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-a2064f8f-b928-44c0-86d8-c0bb9882dbde-80b186c1-7ddd-465a-9b4d-431a14224046" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.275049] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Releasing lock "refresh_cache-314758ce-6522-47cf-8445-0d28b1b085b9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1964.275514] env[62619]: DEBUG nova.compute.manager [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1964.275715] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1964.276633] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4864285e-c7b5-414d-91b0-dd2e301e7571 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.285442] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1964.285670] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9a70b1df-a765-49f6-91a5-ae8d1f19edf1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.293120] env[62619]: DEBUG oslo_vmware.api [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1964.293120] env[62619]: value = "task-1778657" [ 1964.293120] env[62619]: _type = "Task" [ 1964.293120] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.302058] env[62619]: DEBUG oslo_vmware.api [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778657, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.371739] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1964.371946] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1964.372829] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d16956c-fab5-4a3b-a1e5-0d954868e8a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.390752] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a07b14-685f-4761-b9fa-8410bbe2459b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.416303] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Reconfiguring VM to detach interface {{(pid=62619) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1964.416549] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-200c46b7-a6fc-4ce6-8d13-8d373f7b2fb7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.438070] env[62619]: DEBUG oslo_vmware.api [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1964.438070] env[62619]: value = "task-1778658" [ 1964.438070] env[62619]: _type = "Task" [ 1964.438070] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.445635] env[62619]: DEBUG oslo_vmware.api [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778658, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.803879] env[62619]: DEBUG oslo_vmware.api [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778657, 'name': PowerOffVM_Task, 'duration_secs': 0.24846} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.804206] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1964.804352] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1964.804613] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-789c896f-f965-4047-9960-2355277b6d71 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.831756] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1964.832025] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1964.832254] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Deleting the datastore file [datastore1] 314758ce-6522-47cf-8445-0d28b1b085b9 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1964.832472] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7ebc748-c464-445f-83bd-aada076c4e29 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.839198] env[62619]: DEBUG oslo_vmware.api [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for the task: (returnval){ [ 1964.839198] env[62619]: value = "task-1778660" [ 1964.839198] env[62619]: _type = "Task" [ 1964.839198] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.848473] env[62619]: DEBUG oslo_vmware.api [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778660, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.947112] env[62619]: DEBUG oslo_vmware.api [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778658, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.349782] env[62619]: DEBUG oslo_vmware.api [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Task: {'id': task-1778660, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093855} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.350103] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1965.350321] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1965.350518] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1965.350701] env[62619]: INFO nova.compute.manager [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1965.350949] env[62619]: DEBUG oslo.service.loopingcall [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1965.351186] env[62619]: DEBUG nova.compute.manager [-] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1965.351283] env[62619]: DEBUG nova.network.neutron [-] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1965.367782] env[62619]: DEBUG nova.network.neutron [-] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1965.447998] env[62619]: DEBUG oslo_vmware.api [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778658, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.872068] env[62619]: DEBUG nova.network.neutron [-] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1965.949759] env[62619]: DEBUG oslo_vmware.api [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778658, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.375501] env[62619]: INFO nova.compute.manager [-] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Took 1.02 seconds to deallocate network for instance. [ 1966.450129] env[62619]: DEBUG oslo_vmware.api [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778658, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.881632] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1966.881943] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1966.882335] env[62619]: DEBUG nova.objects.instance [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Lazy-loading 'resources' on Instance uuid 314758ce-6522-47cf-8445-0d28b1b085b9 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1966.950213] env[62619]: DEBUG oslo_vmware.api [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778658, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.431730] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "0cde512e-a9f8-4f9b-9e08-41494090e314" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1967.431972] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "0cde512e-a9f8-4f9b-9e08-41494090e314" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1967.453016] env[62619]: DEBUG oslo_vmware.api [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778658, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.485787] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fccdd99-c0f4-46c2-b1aa-569fae88044d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.493123] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08af70a7-cb92-41ab-ab81-88fe67b6ffa7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.523712] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c55245-f1a0-4c96-a722-d985cd3bc1a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.530393] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c808533e-b773-4d1e-872c-0c20578d06b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.543005] env[62619]: DEBUG nova.compute.provider_tree [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1967.934422] env[62619]: DEBUG nova.compute.manager [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1967.951534] env[62619]: DEBUG oslo_vmware.api [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778658, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.046054] env[62619]: DEBUG nova.scheduler.client.report [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1968.452147] env[62619]: DEBUG oslo_vmware.api [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778658, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.452847] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.551339] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.669s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.553758] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.101s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.555323] env[62619]: INFO nova.compute.claims [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1968.572657] env[62619]: INFO nova.scheduler.client.report [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Deleted allocations for instance 314758ce-6522-47cf-8445-0d28b1b085b9 [ 1968.958855] env[62619]: DEBUG oslo_vmware.api [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778658, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.081232] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ad60c332-0f64-4cd0-bf42-98533342297e tempest-ServerShowV257Test-1195786436 tempest-ServerShowV257Test-1195786436-project-member] Lock "314758ce-6522-47cf-8445-0d28b1b085b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.412s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.456998] env[62619]: DEBUG oslo_vmware.api [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778658, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.647443] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870aa596-d33f-42d6-a403-b5ef3f33891d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.656106] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d06ebd-19ce-4d39-b179-20c6b401d795 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.697755] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1f3524-d40c-4b68-887e-f1f2f854e0ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.706193] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02c46ec-f33a-4465-ba1a-0e41cc0f7a6b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.721500] env[62619]: DEBUG nova.compute.provider_tree [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1969.957587] env[62619]: DEBUG oslo_vmware.api [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778658, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.225116] env[62619]: DEBUG nova.scheduler.client.report [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1970.458533] env[62619]: DEBUG oslo_vmware.api [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778658, 'name': ReconfigVM_Task, 'duration_secs': 5.833996} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.458857] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.459097] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Reconfigured VM to detach interface {{(pid=62619) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1970.730886] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.177s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1970.731438] env[62619]: DEBUG nova.compute.manager [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1971.191523] env[62619]: DEBUG oslo_concurrency.lockutils [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1971.191884] env[62619]: DEBUG oslo_concurrency.lockutils [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1971.236446] env[62619]: DEBUG nova.compute.utils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1971.239246] env[62619]: DEBUG nova.compute.manager [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1971.239246] env[62619]: DEBUG nova.network.neutron [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1971.287217] env[62619]: DEBUG nova.policy [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53deb71781e14136bff2b0b6c6a82890', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2c7470712c14fa9bc1804ae2431107b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1971.591954] env[62619]: DEBUG nova.network.neutron [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Successfully created port: 9db75e6b-9462-4d5f-8939-a281ea75ea84 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1971.697913] env[62619]: INFO nova.compute.manager [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Detaching volume 88d1f447-9b6e-467a-a32e-2d78a5468a11 [ 1971.738766] env[62619]: INFO nova.virt.block_device [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 
tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Attempting to driver detach volume 88d1f447-9b6e-467a-a32e-2d78a5468a11 from mountpoint /dev/sdb [ 1971.739026] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Volume detach. Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1971.739223] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369183', 'volume_id': '88d1f447-9b6e-467a-a32e-2d78a5468a11', 'name': 'volume-88d1f447-9b6e-467a-a32e-2d78a5468a11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cec0ea75-042d-4ee5-91d5-cad86456ab97', 'attached_at': '', 'detached_at': '', 'volume_id': '88d1f447-9b6e-467a-a32e-2d78a5468a11', 'serial': '88d1f447-9b6e-467a-a32e-2d78a5468a11'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1971.740168] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60848aaf-e4b4-4efc-8093-ea6a1719abf9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.746942] env[62619]: DEBUG nova.compute.manager [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1971.777169] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37d7de1-3263-4d97-ba28-6af85c32fbe3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.784549] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82d217d-7035-46d2-b22f-e3357859e5a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.806145] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6a7c74-a9e9-41c2-85ad-5965348ad373 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.822213] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] The volume has not been displaced from its original location: [datastore1] volume-88d1f447-9b6e-467a-a32e-2d78a5468a11/volume-88d1f447-9b6e-467a-a32e-2d78a5468a11.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1971.829017] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Reconfiguring VM instance instance-0000006e to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1971.829017] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6317eeef-fcf6-404a-b41d-23a8698f13c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.847868] env[62619]: DEBUG oslo_vmware.api [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1971.847868] env[62619]: value = "task-1778661" [ 1971.847868] env[62619]: _type = "Task" [ 1971.847868] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.857936] env[62619]: DEBUG oslo_vmware.api [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778661, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.960277] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1971.961023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1971.961023] env[62619]: DEBUG nova.network.neutron [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1972.358434] env[62619]: DEBUG oslo_vmware.api [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778661, 'name': ReconfigVM_Task, 'duration_secs': 0.215063} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.358776] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Reconfigured VM instance instance-0000006e to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1972.364167] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2069878-5853-4754-adb7-1628cf83960c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.379471] env[62619]: DEBUG oslo_vmware.api [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1972.379471] env[62619]: value = "task-1778662" [ 1972.379471] env[62619]: _type = "Task" [ 1972.379471] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.388144] env[62619]: DEBUG oslo_vmware.api [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778662, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.760957] env[62619]: DEBUG nova.compute.manager [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1972.786608] env[62619]: DEBUG nova.virt.hardware [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1972.786953] env[62619]: DEBUG nova.virt.hardware [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1972.787076] env[62619]: DEBUG nova.virt.hardware [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1972.787297] env[62619]: DEBUG nova.virt.hardware [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1972.787618] env[62619]: DEBUG nova.virt.hardware [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1972.787618] env[62619]: DEBUG nova.virt.hardware [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1972.787794] env[62619]: DEBUG nova.virt.hardware [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1972.787950] env[62619]: DEBUG nova.virt.hardware [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1972.788192] env[62619]: DEBUG nova.virt.hardware [None 
req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1972.788371] env[62619]: DEBUG nova.virt.hardware [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1972.788581] env[62619]: DEBUG nova.virt.hardware [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1972.789502] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39ddaf5-6cab-4edc-8c8a-b3a79ed84858 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.800592] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ca4cf0-127c-413c-9ab3-2783eac7ede0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.894351] env[62619]: DEBUG oslo_vmware.api [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778662, 'name': ReconfigVM_Task, 'duration_secs': 0.138157} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.894351] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369183', 'volume_id': '88d1f447-9b6e-467a-a32e-2d78a5468a11', 'name': 'volume-88d1f447-9b6e-467a-a32e-2d78a5468a11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cec0ea75-042d-4ee5-91d5-cad86456ab97', 'attached_at': '', 'detached_at': '', 'volume_id': '88d1f447-9b6e-467a-a32e-2d78a5468a11', 'serial': '88d1f447-9b6e-467a-a32e-2d78a5468a11'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1972.986869] env[62619]: INFO nova.network.neutron [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Port 80b186c1-7ddd-465a-9b4d-431a14224046 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1972.987047] env[62619]: DEBUG nova.network.neutron [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updating instance_info_cache with network_info: [{"id": "35375282-c697-45eb-a87c-d85555a9012b", "address": "fa:16:3e:87:9d:97", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35375282-c6", "ovs_interfaceid": "35375282-c697-45eb-a87c-d85555a9012b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.239010] env[62619]: DEBUG nova.compute.manager [req-8e5ddcd7-d780-4012-bb89-fc400d70734f req-04b5ff7f-9fb0-4848-90d0-4073e24b94f1 service nova] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Received event network-vif-plugged-9db75e6b-9462-4d5f-8939-a281ea75ea84 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1973.239304] env[62619]: DEBUG oslo_concurrency.lockutils [req-8e5ddcd7-d780-4012-bb89-fc400d70734f req-04b5ff7f-9fb0-4848-90d0-4073e24b94f1 service nova] Acquiring lock "0cde512e-a9f8-4f9b-9e08-41494090e314-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.239523] env[62619]: DEBUG oslo_concurrency.lockutils [req-8e5ddcd7-d780-4012-bb89-fc400d70734f req-04b5ff7f-9fb0-4848-90d0-4073e24b94f1 service nova] Lock "0cde512e-a9f8-4f9b-9e08-41494090e314-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.239705] env[62619]: DEBUG oslo_concurrency.lockutils [req-8e5ddcd7-d780-4012-bb89-fc400d70734f req-04b5ff7f-9fb0-4848-90d0-4073e24b94f1 service nova] Lock "0cde512e-a9f8-4f9b-9e08-41494090e314-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.239933] env[62619]: DEBUG nova.compute.manager [req-8e5ddcd7-d780-4012-bb89-fc400d70734f req-04b5ff7f-9fb0-4848-90d0-4073e24b94f1 service nova] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] No waiting events found dispatching network-vif-plugged-9db75e6b-9462-4d5f-8939-a281ea75ea84 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1973.240180] 
env[62619]: WARNING nova.compute.manager [req-8e5ddcd7-d780-4012-bb89-fc400d70734f req-04b5ff7f-9fb0-4848-90d0-4073e24b94f1 service nova] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Received unexpected event network-vif-plugged-9db75e6b-9462-4d5f-8939-a281ea75ea84 for instance with vm_state building and task_state spawning. [ 1973.415793] env[62619]: DEBUG nova.network.neutron [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Successfully updated port: 9db75e6b-9462-4d5f-8939-a281ea75ea84 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1973.448711] env[62619]: DEBUG nova.objects.instance [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lazy-loading 'flavor' on Instance uuid cec0ea75-042d-4ee5-91d5-cad86456ab97 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1973.490444] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1973.582686] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.582686] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.919026] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "refresh_cache-0cde512e-a9f8-4f9b-9e08-41494090e314" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1973.919168] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "refresh_cache-0cde512e-a9f8-4f9b-9e08-41494090e314" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1973.919296] env[62619]: DEBUG nova.network.neutron [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1973.994500] env[62619]: DEBUG 
oslo_concurrency.lockutils [None req-b6eb4d31-48ea-4491-a8b5-a2c555715561 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-a2064f8f-b928-44c0-86d8-c0bb9882dbde-80b186c1-7ddd-465a-9b4d-431a14224046" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.126s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1974.085920] env[62619]: DEBUG nova.compute.manager [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1974.090498] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "interface-9e69e4ab-f219-4ba8-8f66-6063d8ab242a-80b186c1-7ddd-465a-9b4d-431a14224046" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1974.090751] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-9e69e4ab-f219-4ba8-8f66-6063d8ab242a-80b186c1-7ddd-465a-9b4d-431a14224046" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.091142] env[62619]: DEBUG nova.objects.instance [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'flavor' on Instance uuid 9e69e4ab-f219-4ba8-8f66-6063d8ab242a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1974.387535] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2d3ff3b-ef62-4588-a35a-ff3d4e96f414 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1974.450701] env[62619]: DEBUG nova.network.neutron [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1974.455250] env[62619]: DEBUG oslo_concurrency.lockutils [None req-60712cf9-6c9f-4dda-9c25-0eab8ab8dc87 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.263s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1974.456301] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2d3ff3b-ef62-4588-a35a-ff3d4e96f414 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.069s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.456492] env[62619]: DEBUG nova.compute.manager [None req-d2d3ff3b-ef62-4588-a35a-ff3d4e96f414 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1974.457358] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492b3b71-2a8f-473f-9a31-2de3cd896e2b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.464371] env[62619]: DEBUG nova.compute.manager [None req-d2d3ff3b-ef62-4588-a35a-ff3d4e96f414 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1974.464930] env[62619]: DEBUG nova.objects.instance [None req-d2d3ff3b-ef62-4588-a35a-ff3d4e96f414 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lazy-loading 'flavor' on Instance uuid cec0ea75-042d-4ee5-91d5-cad86456ab97 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1974.582617] env[62619]: DEBUG nova.network.neutron [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Updating instance_info_cache with network_info: [{"id": "9db75e6b-9462-4d5f-8939-a281ea75ea84", "address": "fa:16:3e:dd:8b:ee", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": 
"nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9db75e6b-94", "ovs_interfaceid": "9db75e6b-9462-4d5f-8939-a281ea75ea84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1974.610074] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1974.610341] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.612333] env[62619]: INFO nova.compute.claims [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1974.690061] env[62619]: DEBUG nova.objects.instance [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'pci_requests' on Instance uuid 9e69e4ab-f219-4ba8-8f66-6063d8ab242a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1975.085192] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "refresh_cache-0cde512e-a9f8-4f9b-9e08-41494090e314" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1975.085518] env[62619]: DEBUG nova.compute.manager [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Instance network_info: |[{"id": "9db75e6b-9462-4d5f-8939-a281ea75ea84", "address": "fa:16:3e:dd:8b:ee", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap9db75e6b-94", "ovs_interfaceid": "9db75e6b-9462-4d5f-8939-a281ea75ea84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1975.085947] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:8b:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cebc48c-6a77-46bf-9c12-ac130e4d7d76', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9db75e6b-9462-4d5f-8939-a281ea75ea84', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1975.093523] env[62619]: DEBUG oslo.service.loopingcall [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1975.093735] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1975.093967] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82870163-633b-48a5-b3ef-6a72fce3786e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.114754] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1975.114754] env[62619]: value = "task-1778663" [ 1975.114754] env[62619]: _type = "Task" [ 1975.114754] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.124529] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778663, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.193016] env[62619]: DEBUG nova.objects.base [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Object Instance<9e69e4ab-f219-4ba8-8f66-6063d8ab242a> lazy-loaded attributes: flavor,pci_requests {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1975.193425] env[62619]: DEBUG nova.network.neutron [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1975.260367] env[62619]: DEBUG nova.policy [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8d937f303584c3daea133a6283fd5a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23d77e73a09d492695fbfe6ac2c93371', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1975.268464] env[62619]: DEBUG nova.compute.manager [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Received event network-changed-35375282-c697-45eb-a87c-d85555a9012b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1975.268649] env[62619]: DEBUG nova.compute.manager [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Refreshing instance network info cache due to event network-changed-35375282-c697-45eb-a87c-d85555a9012b. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1975.268854] env[62619]: DEBUG oslo_concurrency.lockutils [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] Acquiring lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.268994] env[62619]: DEBUG oslo_concurrency.lockutils [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] Acquired lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.269173] env[62619]: DEBUG nova.network.neutron [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Refreshing network info cache for port 35375282-c697-45eb-a87c-d85555a9012b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1975.475178] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2d3ff3b-ef62-4588-a35a-ff3d4e96f414 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1975.475489] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45284750-912c-480e-821e-fa544e5abe1e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.482901] env[62619]: DEBUG oslo_vmware.api [None req-d2d3ff3b-ef62-4588-a35a-ff3d4e96f414 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1975.482901] env[62619]: value = "task-1778664" [ 1975.482901] env[62619]: _type = "Task" [ 1975.482901] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.491451] env[62619]: DEBUG oslo_vmware.api [None req-d2d3ff3b-ef62-4588-a35a-ff3d4e96f414 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778664, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.626715] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778663, 'name': CreateVM_Task, 'duration_secs': 0.316394} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.626868] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1975.627467] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.627632] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.627938] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1975.628200] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fb82914-5b07-44f9-9f01-27567b1446bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.632631] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1975.632631] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5200a7ac-426b-3d95-1ca7-5a0916500530" [ 1975.632631] env[62619]: _type = "Task" [ 1975.632631] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.640414] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5200a7ac-426b-3d95-1ca7-5a0916500530, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.712575] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29c031f-7857-435c-811f-07271b393fbd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.719886] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b176f11-5e3e-42c0-988a-ca177d35cf86 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.748596] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da983b5-853d-4799-9986-1b9299d76fee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.755490] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0d58a2-c5c8-43cb-845c-e49620d046a1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.767800] env[62619]: DEBUG nova.compute.provider_tree [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1975.992126] env[62619]: DEBUG oslo_vmware.api [None req-d2d3ff3b-ef62-4588-a35a-ff3d4e96f414 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778664, 'name': PowerOffVM_Task, 'duration_secs': 0.172692} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.992395] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2d3ff3b-ef62-4588-a35a-ff3d4e96f414 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1975.992591] env[62619]: DEBUG nova.compute.manager [None req-d2d3ff3b-ef62-4588-a35a-ff3d4e96f414 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1975.993334] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b886b92f-c910-44ad-806b-99f12bac5d41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.996076] env[62619]: DEBUG nova.network.neutron [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updated VIF entry in instance network info cache for port 35375282-c697-45eb-a87c-d85555a9012b. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1975.996387] env[62619]: DEBUG nova.network.neutron [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updating instance_info_cache with network_info: [{"id": "35375282-c697-45eb-a87c-d85555a9012b", "address": "fa:16:3e:87:9d:97", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35375282-c6", "ovs_interfaceid": "35375282-c697-45eb-a87c-d85555a9012b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1976.143280] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5200a7ac-426b-3d95-1ca7-5a0916500530, 'name': SearchDatastore_Task, 'duration_secs': 0.009489} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.143596] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1976.143822] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1976.144058] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.144244] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1976.144466] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1976.144725] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-315de38c-3e2b-4c90-a0db-4a150d42c31f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.155101] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1976.155207] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1976.155861] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acde9c88-1fb6-40c3-a449-5d842e50ef1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.161366] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1976.161366] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5271ba62-9d19-78c9-c78b-c56a54b4677e" [ 1976.161366] env[62619]: _type = "Task" [ 1976.161366] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.169062] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5271ba62-9d19-78c9-c78b-c56a54b4677e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.271448] env[62619]: DEBUG nova.scheduler.client.report [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1976.499422] env[62619]: DEBUG oslo_concurrency.lockutils [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] Releasing lock "refresh_cache-a2064f8f-b928-44c0-86d8-c0bb9882dbde" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1976.499730] env[62619]: DEBUG nova.compute.manager [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Received event network-changed-56f48ead-6f6e-451e-af3c-2634f9797c5e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1976.499886] env[62619]: DEBUG nova.compute.manager [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Refreshing instance network info cache due to event network-changed-56f48ead-6f6e-451e-af3c-2634f9797c5e. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1976.500118] env[62619]: DEBUG oslo_concurrency.lockutils [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] Acquiring lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.500309] env[62619]: DEBUG oslo_concurrency.lockutils [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] Acquired lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1976.500493] env[62619]: DEBUG nova.network.neutron [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Refreshing network info cache for port 56f48ead-6f6e-451e-af3c-2634f9797c5e {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1976.505712] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d2d3ff3b-ef62-4588-a35a-ff3d4e96f414 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 2.049s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1976.672348] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5271ba62-9d19-78c9-c78b-c56a54b4677e, 'name': SearchDatastore_Task, 'duration_secs': 0.008779} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.673301] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de1f78c4-9d62-428b-8e6e-474eb84eb041 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.678456] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1976.678456] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5280c76a-9bf6-b335-b779-9eb8d46831a1" [ 1976.678456] env[62619]: _type = "Task" [ 1976.678456] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.685925] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5280c76a-9bf6-b335-b779-9eb8d46831a1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.717076] env[62619]: DEBUG nova.network.neutron [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Successfully updated port: 80b186c1-7ddd-465a-9b4d-431a14224046 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1976.776262] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.166s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1976.776797] env[62619]: DEBUG nova.compute.manager [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1976.798818] env[62619]: DEBUG nova.objects.instance [None req-fe5c6dea-4106-4615-8392-c70c9edf2963 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lazy-loading 'flavor' on Instance uuid cec0ea75-042d-4ee5-91d5-cad86456ab97 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1977.192143] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5280c76a-9bf6-b335-b779-9eb8d46831a1, 'name': SearchDatastore_Task, 'duration_secs': 0.009184} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.192617] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.193021] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 0cde512e-a9f8-4f9b-9e08-41494090e314/0cde512e-a9f8-4f9b-9e08-41494090e314.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1977.193515] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce9a8de3-cf8a-4aca-a614-144281731460 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.200184] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1977.200184] env[62619]: value = "task-1778665" [ 1977.200184] env[62619]: _type = "Task" [ 1977.200184] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.208049] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778665, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.219869] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1977.225981] env[62619]: DEBUG nova.network.neutron [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Updated VIF entry in instance network info cache for port 56f48ead-6f6e-451e-af3c-2634f9797c5e. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1977.225981] env[62619]: DEBUG nova.network.neutron [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Updating instance_info_cache with network_info: [{"id": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "address": "fa:16:3e:8b:e2:fc", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f48ead-6f", "ovs_interfaceid": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1977.282083] env[62619]: DEBUG nova.compute.utils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1977.283529] env[62619]: DEBUG nova.compute.manager [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1977.283784] env[62619]: DEBUG nova.network.neutron [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1977.303542] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fe5c6dea-4106-4615-8392-c70c9edf2963 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "refresh_cache-cec0ea75-042d-4ee5-91d5-cad86456ab97" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1977.303705] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fe5c6dea-4106-4615-8392-c70c9edf2963 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquired lock "refresh_cache-cec0ea75-042d-4ee5-91d5-cad86456ab97" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1977.303874] env[62619]: DEBUG nova.network.neutron [None req-fe5c6dea-4106-4615-8392-c70c9edf2963 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1977.304059] env[62619]: DEBUG nova.objects.instance [None req-fe5c6dea-4106-4615-8392-c70c9edf2963 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lazy-loading 'info_cache' on Instance uuid cec0ea75-042d-4ee5-91d5-cad86456ab97 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1977.340461] env[62619]: DEBUG nova.policy [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a13407c9bfc448bb27a06680d41afb2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72ae04936c9b4ea19b5d7fac78c96ba4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 1977.421386] env[62619]: DEBUG nova.compute.manager [req-ff4d6d81-43fa-4fe6-af78-17933d9200cf req-e515e98c-8672-4de6-b353-91fd01d0e23f service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Received event network-vif-plugged-80b186c1-7ddd-465a-9b4d-431a14224046 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1977.421618] env[62619]: DEBUG oslo_concurrency.lockutils [req-ff4d6d81-43fa-4fe6-af78-17933d9200cf req-e515e98c-8672-4de6-b353-91fd01d0e23f service nova] Acquiring lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.421898] env[62619]: DEBUG oslo_concurrency.lockutils [req-ff4d6d81-43fa-4fe6-af78-17933d9200cf 
req-e515e98c-8672-4de6-b353-91fd01d0e23f service nova] Lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.422226] env[62619]: DEBUG oslo_concurrency.lockutils [req-ff4d6d81-43fa-4fe6-af78-17933d9200cf req-e515e98c-8672-4de6-b353-91fd01d0e23f service nova] Lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1977.422414] env[62619]: DEBUG nova.compute.manager [req-ff4d6d81-43fa-4fe6-af78-17933d9200cf req-e515e98c-8672-4de6-b353-91fd01d0e23f service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] No waiting events found dispatching network-vif-plugged-80b186c1-7ddd-465a-9b4d-431a14224046 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1977.422607] env[62619]: WARNING nova.compute.manager [req-ff4d6d81-43fa-4fe6-af78-17933d9200cf req-e515e98c-8672-4de6-b353-91fd01d0e23f service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Received unexpected event network-vif-plugged-80b186c1-7ddd-465a-9b4d-431a14224046 for instance with vm_state active and task_state None. [ 1977.422734] env[62619]: DEBUG nova.compute.manager [req-ff4d6d81-43fa-4fe6-af78-17933d9200cf req-e515e98c-8672-4de6-b353-91fd01d0e23f service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Received event network-changed-80b186c1-7ddd-465a-9b4d-431a14224046 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1977.422952] env[62619]: DEBUG nova.compute.manager [req-ff4d6d81-43fa-4fe6-af78-17933d9200cf req-e515e98c-8672-4de6-b353-91fd01d0e23f service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Refreshing instance network info cache due to event network-changed-80b186c1-7ddd-465a-9b4d-431a14224046. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1977.423145] env[62619]: DEBUG oslo_concurrency.lockutils [req-ff4d6d81-43fa-4fe6-af78-17933d9200cf req-e515e98c-8672-4de6-b353-91fd01d0e23f service nova] Acquiring lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1977.670159] env[62619]: DEBUG nova.network.neutron [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Successfully created port: 4e33b817-e043-4b75-92ae-40c8132fcc06 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1977.709628] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778665, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.409476} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.709920] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 0cde512e-a9f8-4f9b-9e08-41494090e314/0cde512e-a9f8-4f9b-9e08-41494090e314.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1977.710164] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1977.710406] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b99d286a-2d10-4dc7-b02f-e2bdedf70b56 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.716404] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1977.716404] env[62619]: value = "task-1778666" [ 1977.716404] env[62619]: _type = "Task" [ 1977.716404] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.723617] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778666, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.728351] env[62619]: DEBUG oslo_concurrency.lockutils [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] Releasing lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.730416] env[62619]: DEBUG nova.compute.manager [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Received event network-changed-9db75e6b-9462-4d5f-8939-a281ea75ea84 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1977.730416] env[62619]: DEBUG nova.compute.manager [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Refreshing instance network info cache due to event network-changed-9db75e6b-9462-4d5f-8939-a281ea75ea84. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1977.730416] env[62619]: DEBUG oslo_concurrency.lockutils [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] Acquiring lock "refresh_cache-0cde512e-a9f8-4f9b-9e08-41494090e314" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1977.730416] env[62619]: DEBUG oslo_concurrency.lockutils [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] Acquired lock "refresh_cache-0cde512e-a9f8-4f9b-9e08-41494090e314" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1977.730416] env[62619]: DEBUG nova.network.neutron [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Refreshing network info cache for port 9db75e6b-9462-4d5f-8939-a281ea75ea84 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1977.731492] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1977.731723] env[62619]: DEBUG nova.network.neutron [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1977.787781] env[62619]: DEBUG nova.compute.manager [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1977.806745] env[62619]: DEBUG nova.objects.base [None req-fe5c6dea-4106-4615-8392-c70c9edf2963 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Object Instance<cec0ea75-042d-4ee5-91d5-cad86456ab97> lazy-loaded attributes: flavor,info_cache {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1978.226312] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778666, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085928} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.226593] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1978.227377] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58492f98-20b4-41d3-8cdb-fc309a666326 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.252174] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 0cde512e-a9f8-4f9b-9e08-41494090e314/0cde512e-a9f8-4f9b-9e08-41494090e314.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1978.252448] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8696f3ac-77b4-4417-8496-88b9b360a6cd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.271724] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1978.271724] env[62619]: value = "task-1778667" [ 1978.271724] env[62619]: _type = "Task" [ 1978.271724] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.281327] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778667, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.305217] env[62619]: WARNING nova.network.neutron [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] ed952a81-cb24-4b52-a137-9ceeefb896cf already exists in list: networks containing: ['ed952a81-cb24-4b52-a137-9ceeefb896cf']. ignoring it [ 1978.545353] env[62619]: DEBUG nova.network.neutron [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Updated VIF entry in instance network info cache for port 9db75e6b-9462-4d5f-8939-a281ea75ea84. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1978.545793] env[62619]: DEBUG nova.network.neutron [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Updating instance_info_cache with network_info: [{"id": "9db75e6b-9462-4d5f-8939-a281ea75ea84", "address": "fa:16:3e:dd:8b:ee", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9db75e6b-94", "ovs_interfaceid": "9db75e6b-9462-4d5f-8939-a281ea75ea84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1978.703671] env[62619]: DEBUG nova.network.neutron [None req-fe5c6dea-4106-4615-8392-c70c9edf2963 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Updating instance_info_cache with network_info: [{"id": "0de99671-66a1-4b86-9417-2955fdf1dcba", "address": "fa:16:3e:fc:2a:8e", "network": {"id": "6a1847de-b585-445d-8064-dc33dc365719", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1852054191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4010737526cd4a3aa36f15a187051010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0de99671-66", "ovs_interfaceid": "0de99671-66a1-4b86-9417-2955fdf1dcba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1978.746609] env[62619]: DEBUG nova.network.neutron [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Updating instance_info_cache with network_info: 
[{"id": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "address": "fa:16:3e:8b:e2:fc", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f48ead-6f", "ovs_interfaceid": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "80b186c1-7ddd-465a-9b4d-431a14224046", "address": "fa:16:3e:d4:a3:f5", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80b186c1-7d", "ovs_interfaceid": "80b186c1-7ddd-465a-9b4d-431a14224046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1978.781572] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778667, 'name': ReconfigVM_Task, 'duration_secs': 0.42265} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.781968] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 0cde512e-a9f8-4f9b-9e08-41494090e314/0cde512e-a9f8-4f9b-9e08-41494090e314.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1978.782509] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c452d304-42d0-4806-990b-5d8004a3c6c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.790601] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1978.790601] env[62619]: value = "task-1778668" [ 1978.790601] env[62619]: _type = "Task" [ 1978.790601] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.798048] env[62619]: DEBUG nova.compute.manager [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1978.803435] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778668, 'name': Rename_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.827961] env[62619]: DEBUG nova.virt.hardware [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1978.828224] env[62619]: DEBUG nova.virt.hardware [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1978.828378] env[62619]: DEBUG nova.virt.hardware [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1978.828554] env[62619]: DEBUG nova.virt.hardware [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1978.828697] env[62619]: DEBUG nova.virt.hardware [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1978.828840] env[62619]: DEBUG nova.virt.hardware [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1978.829056] env[62619]: DEBUG nova.virt.hardware [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1978.829219] env[62619]: DEBUG nova.virt.hardware [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1978.829386] env[62619]: DEBUG nova.virt.hardware [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1978.829545] env[62619]: DEBUG nova.virt.hardware [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1978.829720] env[62619]: DEBUG nova.virt.hardware [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1978.830982] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e124ad-7f1f-4f66-a73a-754141e0a693 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.838426] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40a0a06-3a24-4492-83ca-2ff0f951e11a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.049107] env[62619]: DEBUG oslo_concurrency.lockutils [req-231ed82a-fde9-4309-8ce4-5be8d7fe8527 req-6a781339-5851-439c-934e-be2e70f18430 service nova] Releasing lock "refresh_cache-0cde512e-a9f8-4f9b-9e08-41494090e314" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1979.126797] env[62619]: DEBUG nova.network.neutron [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Successfully updated port: 4e33b817-e043-4b75-92ae-40c8132fcc06 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1979.206415] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fe5c6dea-4106-4615-8392-c70c9edf2963 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Releasing lock "refresh_cache-cec0ea75-042d-4ee5-91d5-cad86456ab97" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1979.249461] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1979.250154] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1979.250313] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98950168-27d7-47c2-8a30-24889bacce05 
tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.250575] env[62619]: DEBUG oslo_concurrency.lockutils [req-ff4d6d81-43fa-4fe6-af78-17933d9200cf req-e515e98c-8672-4de6-b353-91fd01d0e23f service nova] Acquired lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.250769] env[62619]: DEBUG nova.network.neutron [req-ff4d6d81-43fa-4fe6-af78-17933d9200cf req-e515e98c-8672-4de6-b353-91fd01d0e23f service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Refreshing network info cache for port 80b186c1-7ddd-465a-9b4d-431a14224046 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1979.252397] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a895c739-1d90-48d1-92f8-710999759afe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.269598] env[62619]: DEBUG nova.virt.hardware [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1979.269841] env[62619]: DEBUG nova.virt.hardware [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1979.270014] env[62619]: DEBUG nova.virt.hardware [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1979.270216] env[62619]: DEBUG nova.virt.hardware [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1979.270361] env[62619]: DEBUG nova.virt.hardware [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1979.270504] env[62619]: DEBUG nova.virt.hardware [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1979.270703] env[62619]: DEBUG nova.virt.hardware [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1979.270864] env[62619]: DEBUG nova.virt.hardware [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1979.271040] env[62619]: DEBUG nova.virt.hardware [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1979.271205] env[62619]: DEBUG nova.virt.hardware [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1979.271374] env[62619]: DEBUG nova.virt.hardware [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1979.277512] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Reconfiguring VM to attach interface {{(pid=62619) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1979.278019] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a821c8f-33fe-4bf6-9ce9-726f09e15979 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.295423] env[62619]: DEBUG oslo_vmware.api [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1979.295423] env[62619]: value = "task-1778669" [ 1979.295423] env[62619]: _type = "Task" [ 1979.295423] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.301289] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778668, 'name': Rename_Task, 'duration_secs': 0.237988} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.301852] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1979.302087] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79bab8b4-a928-413a-980a-70421545ba46 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.306008] env[62619]: DEBUG oslo_vmware.api [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778669, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.309635] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 1979.309635] env[62619]: value = "task-1778670" [ 1979.309635] env[62619]: _type = "Task" [ 1979.309635] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.318617] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778670, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.446980] env[62619]: DEBUG nova.compute.manager [req-06366bb6-1c05-43b9-86a8-fda3dce12f7c req-c94f3c6f-b9b0-4f87-911d-8b877d7bb050 service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Received event network-vif-plugged-4e33b817-e043-4b75-92ae-40c8132fcc06 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1979.447261] env[62619]: DEBUG oslo_concurrency.lockutils [req-06366bb6-1c05-43b9-86a8-fda3dce12f7c req-c94f3c6f-b9b0-4f87-911d-8b877d7bb050 service nova] Acquiring lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1979.447458] env[62619]: DEBUG oslo_concurrency.lockutils [req-06366bb6-1c05-43b9-86a8-fda3dce12f7c req-c94f3c6f-b9b0-4f87-911d-8b877d7bb050 service nova] Lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1979.447621] env[62619]: DEBUG oslo_concurrency.lockutils [req-06366bb6-1c05-43b9-86a8-fda3dce12f7c req-c94f3c6f-b9b0-4f87-911d-8b877d7bb050 service nova] Lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1979.447799] env[62619]: DEBUG nova.compute.manager [req-06366bb6-1c05-43b9-86a8-fda3dce12f7c req-c94f3c6f-b9b0-4f87-911d-8b877d7bb050 service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] No waiting events found dispatching network-vif-plugged-4e33b817-e043-4b75-92ae-40c8132fcc06 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1979.447948] env[62619]: WARNING nova.compute.manager [req-06366bb6-1c05-43b9-86a8-fda3dce12f7c req-c94f3c6f-b9b0-4f87-911d-8b877d7bb050 service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Received unexpected event network-vif-plugged-4e33b817-e043-4b75-92ae-40c8132fcc06 for instance with vm_state building and task_state spawning. [ 1979.448140] env[62619]: DEBUG nova.compute.manager [req-06366bb6-1c05-43b9-86a8-fda3dce12f7c req-c94f3c6f-b9b0-4f87-911d-8b877d7bb050 service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Received event network-changed-4e33b817-e043-4b75-92ae-40c8132fcc06 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1979.448301] env[62619]: DEBUG nova.compute.manager [req-06366bb6-1c05-43b9-86a8-fda3dce12f7c req-c94f3c6f-b9b0-4f87-911d-8b877d7bb050 service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Refreshing instance network info cache due to event network-changed-4e33b817-e043-4b75-92ae-40c8132fcc06. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1979.448477] env[62619]: DEBUG oslo_concurrency.lockutils [req-06366bb6-1c05-43b9-86a8-fda3dce12f7c req-c94f3c6f-b9b0-4f87-911d-8b877d7bb050 service nova] Acquiring lock "refresh_cache-e599f5ad-1b4d-4912-9b96-2544c52b0acf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1979.448625] env[62619]: DEBUG oslo_concurrency.lockutils [req-06366bb6-1c05-43b9-86a8-fda3dce12f7c req-c94f3c6f-b9b0-4f87-911d-8b877d7bb050 service nova] Acquired lock "refresh_cache-e599f5ad-1b4d-4912-9b96-2544c52b0acf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.448776] env[62619]: DEBUG nova.network.neutron [req-06366bb6-1c05-43b9-86a8-fda3dce12f7c req-c94f3c6f-b9b0-4f87-911d-8b877d7bb050 service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Refreshing network info cache for port 4e33b817-e043-4b75-92ae-40c8132fcc06 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1979.629440] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "refresh_cache-e599f5ad-1b4d-4912-9b96-2544c52b0acf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1979.809202] env[62619]: DEBUG oslo_vmware.api [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778669, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.818805] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778670, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.984968] env[62619]: DEBUG nova.network.neutron [req-06366bb6-1c05-43b9-86a8-fda3dce12f7c req-c94f3c6f-b9b0-4f87-911d-8b877d7bb050 service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1980.026514] env[62619]: DEBUG nova.network.neutron [req-ff4d6d81-43fa-4fe6-af78-17933d9200cf req-e515e98c-8672-4de6-b353-91fd01d0e23f service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Updated VIF entry in instance network info cache for port 80b186c1-7ddd-465a-9b4d-431a14224046. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1980.027054] env[62619]: DEBUG nova.network.neutron [req-ff4d6d81-43fa-4fe6-af78-17933d9200cf req-e515e98c-8672-4de6-b353-91fd01d0e23f service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Updating instance_info_cache with network_info: [{"id": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "address": "fa:16:3e:8b:e2:fc", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f48ead-6f", "ovs_interfaceid": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "80b186c1-7ddd-465a-9b4d-431a14224046", "address": "fa:16:3e:d4:a3:f5", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80b186c1-7d", "ovs_interfaceid": "80b186c1-7ddd-465a-9b4d-431a14224046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1980.078417] env[62619]: DEBUG nova.network.neutron [req-06366bb6-1c05-43b9-86a8-fda3dce12f7c req-c94f3c6f-b9b0-4f87-911d-8b877d7bb050 service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1980.212978] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe5c6dea-4106-4615-8392-c70c9edf2963 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Powering on the VM {{(pid=62619) power_on_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1980.213343] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0cd8b76e-bfa9-46c3-8dad-69b520a67879 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.220955] env[62619]: DEBUG oslo_vmware.api [None req-fe5c6dea-4106-4615-8392-c70c9edf2963 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 1980.220955] env[62619]: value = "task-1778671" [ 1980.220955] env[62619]: _type = "Task" [ 1980.220955] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.228628] env[62619]: DEBUG oslo_vmware.api [None req-fe5c6dea-4106-4615-8392-c70c9edf2963 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778671, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.305716] env[62619]: DEBUG oslo_vmware.api [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778669, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.318120] env[62619]: DEBUG oslo_vmware.api [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778670, 'name': PowerOnVM_Task, 'duration_secs': 0.588434} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.318369] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1980.318647] env[62619]: INFO nova.compute.manager [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Took 7.56 seconds to spawn the instance on the hypervisor. 
[ 1980.318727] env[62619]: DEBUG nova.compute.manager [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1980.319468] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29fd5648-7022-4e32-859d-f6c128c8e728 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.533026] env[62619]: DEBUG oslo_concurrency.lockutils [req-ff4d6d81-43fa-4fe6-af78-17933d9200cf req-e515e98c-8672-4de6-b353-91fd01d0e23f service nova] Releasing lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1980.580984] env[62619]: DEBUG oslo_concurrency.lockutils [req-06366bb6-1c05-43b9-86a8-fda3dce12f7c req-c94f3c6f-b9b0-4f87-911d-8b877d7bb050 service nova] Releasing lock "refresh_cache-e599f5ad-1b4d-4912-9b96-2544c52b0acf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1980.581400] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired lock "refresh_cache-e599f5ad-1b4d-4912-9b96-2544c52b0acf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1980.581568] env[62619]: DEBUG nova.network.neutron [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1980.732531] env[62619]: DEBUG oslo_vmware.api [None req-fe5c6dea-4106-4615-8392-c70c9edf2963 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778671, 'name': PowerOnVM_Task, 'duration_secs': 0.456777} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.732937] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe5c6dea-4106-4615-8392-c70c9edf2963 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1980.733298] env[62619]: DEBUG nova.compute.manager [None req-fe5c6dea-4106-4615-8392-c70c9edf2963 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1980.734550] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ef0fa8-5738-4ebf-b225-a18306722de3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.807958] env[62619]: DEBUG oslo_vmware.api [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778669, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.837877] env[62619]: INFO nova.compute.manager [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Took 12.40 seconds to build instance. [ 1981.125905] env[62619]: DEBUG nova.network.neutron [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1981.270889] env[62619]: DEBUG nova.network.neutron [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Updating instance_info_cache with network_info: [{"id": "4e33b817-e043-4b75-92ae-40c8132fcc06", "address": "fa:16:3e:bf:96:2b", "network": {"id": "0fdc1539-eb35-4283-99e8-fcc5a7a64110", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2082542751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ae04936c9b4ea19b5d7fac78c96ba4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e33b817-e0", "ovs_interfaceid": "4e33b817-e043-4b75-92ae-40c8132fcc06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1981.307616] env[62619]: DEBUG oslo_vmware.api [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778669, 'name': ReconfigVM_Task, 'duration_secs': 1.687168} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1981.308152] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1981.308367] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Reconfigured VM to attach interface {{(pid=62619) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1981.340048] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b1ec291a-4c29-47cf-99e3-0395ee88dd99 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "0cde512e-a9f8-4f9b-9e08-41494090e314" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.908s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.473246] env[62619]: DEBUG nova.compute.manager [req-9df2d637-b84d-4fcd-a45a-d6329cb0a65b req-529cb169-c1bf-4202-8c53-972047252e5b service nova] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Received event network-changed-9db75e6b-9462-4d5f-8939-a281ea75ea84 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1981.473450] env[62619]: DEBUG nova.compute.manager [req-9df2d637-b84d-4fcd-a45a-d6329cb0a65b req-529cb169-c1bf-4202-8c53-972047252e5b service nova] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Refreshing instance network info cache due to event network-changed-9db75e6b-9462-4d5f-8939-a281ea75ea84. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1981.473657] env[62619]: DEBUG oslo_concurrency.lockutils [req-9df2d637-b84d-4fcd-a45a-d6329cb0a65b req-529cb169-c1bf-4202-8c53-972047252e5b service nova] Acquiring lock "refresh_cache-0cde512e-a9f8-4f9b-9e08-41494090e314" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1981.473798] env[62619]: DEBUG oslo_concurrency.lockutils [req-9df2d637-b84d-4fcd-a45a-d6329cb0a65b req-529cb169-c1bf-4202-8c53-972047252e5b service nova] Acquired lock "refresh_cache-0cde512e-a9f8-4f9b-9e08-41494090e314" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1981.473959] env[62619]: DEBUG nova.network.neutron [req-9df2d637-b84d-4fcd-a45a-d6329cb0a65b req-529cb169-c1bf-4202-8c53-972047252e5b service nova] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Refreshing network info cache for port 9db75e6b-9462-4d5f-8939-a281ea75ea84 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1981.773165] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Releasing lock "refresh_cache-e599f5ad-1b4d-4912-9b96-2544c52b0acf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1981.773503] env[62619]: DEBUG nova.compute.manager [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Instance network_info: |[{"id": "4e33b817-e043-4b75-92ae-40c8132fcc06", "address": "fa:16:3e:bf:96:2b", "network": {"id": "0fdc1539-eb35-4283-99e8-fcc5a7a64110", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2082542751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ae04936c9b4ea19b5d7fac78c96ba4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e33b817-e0", "ovs_interfaceid": "4e33b817-e043-4b75-92ae-40c8132fcc06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1981.773930] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:96:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'4e33b817-e043-4b75-92ae-40c8132fcc06', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1981.781388] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Creating folder: Project (72ae04936c9b4ea19b5d7fac78c96ba4). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1981.781666] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2024be8e-f80d-4ea1-8cf2-a71f84e59152 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.791823] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Created folder: Project (72ae04936c9b4ea19b5d7fac78c96ba4) in parent group-v368875. [ 1981.791998] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Creating folder: Instances. Parent ref: group-v369194. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1981.792254] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c509463-bab8-4437-b48f-5f5a7406d38e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.800210] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Created folder: Instances in parent group-v369194. [ 1981.800455] env[62619]: DEBUG oslo.service.loopingcall [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1981.800646] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1981.800845] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-37b1453f-7b50-49e4-acfb-4023c4901d0e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.815998] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98950168-27d7-47c2-8a30-24889bacce05 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-9e69e4ab-f219-4ba8-8f66-6063d8ab242a-80b186c1-7ddd-465a-9b4d-431a14224046" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.725s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.821343] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1981.821343] env[62619]: value = "task-1778674" [ 1981.821343] env[62619]: _type = "Task" [ 1981.821343] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.828791] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778674, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.194117] env[62619]: DEBUG nova.network.neutron [req-9df2d637-b84d-4fcd-a45a-d6329cb0a65b req-529cb169-c1bf-4202-8c53-972047252e5b service nova] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Updated VIF entry in instance network info cache for port 9db75e6b-9462-4d5f-8939-a281ea75ea84. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1982.194547] env[62619]: DEBUG nova.network.neutron [req-9df2d637-b84d-4fcd-a45a-d6329cb0a65b req-529cb169-c1bf-4202-8c53-972047252e5b service nova] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Updating instance_info_cache with network_info: [{"id": "9db75e6b-9462-4d5f-8939-a281ea75ea84", "address": "fa:16:3e:dd:8b:ee", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9db75e6b-94", "ovs_interfaceid": "9db75e6b-9462-4d5f-8939-a281ea75ea84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1982.331148] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778674, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.697724] env[62619]: DEBUG oslo_concurrency.lockutils [req-9df2d637-b84d-4fcd-a45a-d6329cb0a65b req-529cb169-c1bf-4202-8c53-972047252e5b service nova] Releasing lock "refresh_cache-0cde512e-a9f8-4f9b-9e08-41494090e314" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1982.831976] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778674, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.278149] env[62619]: DEBUG oslo_concurrency.lockutils [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "interface-9e69e4ab-f219-4ba8-8f66-6063d8ab242a-80b186c1-7ddd-465a-9b4d-431a14224046" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.278624] env[62619]: DEBUG oslo_concurrency.lockutils [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-9e69e4ab-f219-4ba8-8f66-6063d8ab242a-80b186c1-7ddd-465a-9b4d-431a14224046" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.332049] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778674, 'name': CreateVM_Task, 'duration_secs': 1.402138} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.332230] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1983.332848] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1983.333022] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1983.333383] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1983.333646] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75b51f1c-6f39-4330-9589-81845a83a334 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.338571] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 1983.338571] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529263d8-7599-38b6-1ecb-f66d91f1633e" [ 1983.338571] env[62619]: _type = "Task" [ 1983.338571] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.346122] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529263d8-7599-38b6-1ecb-f66d91f1633e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.781310] env[62619]: DEBUG oslo_concurrency.lockutils [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1983.781687] env[62619]: DEBUG oslo_concurrency.lockutils [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1983.783014] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306f2fdb-493d-408f-a77b-41906aec95bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.800966] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4ce830-e26e-4ccc-a987-b7bc291570e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.826408] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Reconfiguring VM to detach interface {{(pid=62619) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1983.826655] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67351f16-17b5-4a09-99a9-022349802f24 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.850267] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529263d8-7599-38b6-1ecb-f66d91f1633e, 'name': SearchDatastore_Task, 'duration_secs': 0.011843} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.851446] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1983.851677] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1983.851900] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1983.852058] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1983.852247] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1983.852536] env[62619]: DEBUG oslo_vmware.api [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1983.852536] env[62619]: value = "task-1778675" [ 1983.852536] env[62619]: _type = "Task" [ 1983.852536] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.852712] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f88eddf1-7ba5-4a2b-93bb-24756e8535de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.862442] env[62619]: DEBUG oslo_vmware.api [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778675, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.865921] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1983.866105] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1983.866777] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c6def87-1a08-44ca-ba6e-fdcc2a5c8045 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.872105] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 1983.872105] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5204311b-6244-671b-49eb-3dced5964e6e" [ 1983.872105] env[62619]: _type = "Task" [ 1983.872105] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.880362] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5204311b-6244-671b-49eb-3dced5964e6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.364334] env[62619]: DEBUG oslo_vmware.api [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778675, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.380469] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5204311b-6244-671b-49eb-3dced5964e6e, 'name': SearchDatastore_Task, 'duration_secs': 0.021554} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.381323] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15091dd0-748a-4905-9c21-89d9f3141435 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.385958] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 1984.385958] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52760f4b-d62b-519e-b5f8-4ba2b1fa297b" [ 1984.385958] env[62619]: _type = "Task" [ 1984.385958] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.394368] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52760f4b-d62b-519e-b5f8-4ba2b1fa297b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.864813] env[62619]: DEBUG oslo_vmware.api [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778675, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.896077] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52760f4b-d62b-519e-b5f8-4ba2b1fa297b, 'name': SearchDatastore_Task, 'duration_secs': 0.030818} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.896361] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1984.896607] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e599f5ad-1b4d-4912-9b96-2544c52b0acf/e599f5ad-1b4d-4912-9b96-2544c52b0acf.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1984.896859] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afa9bf11-aa02-4c1d-adf2-b537f44da19f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.903838] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 1984.903838] env[62619]: value = "task-1778676" [ 1984.903838] env[62619]: _type = "Task" [ 1984.903838] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.911271] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778676, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.365310] env[62619]: DEBUG oslo_vmware.api [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778675, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.413514] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778676, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.375023} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.413811] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e599f5ad-1b4d-4912-9b96-2544c52b0acf/e599f5ad-1b4d-4912-9b96-2544c52b0acf.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1985.414020] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1985.414267] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf0a3520-3b6a-440d-b1b1-48adbd08b58a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.420026] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 1985.420026] env[62619]: value = "task-1778677" [ 1985.420026] env[62619]: _type = "Task" [ 1985.420026] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.428662] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778677, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.866672] env[62619]: DEBUG oslo_vmware.api [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778675, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.930014] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778677, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128461} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.930291] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1985.931057] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0dc0bfa-fc43-4732-bd04-e731bd9a5577 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.953327] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] e599f5ad-1b4d-4912-9b96-2544c52b0acf/e599f5ad-1b4d-4912-9b96-2544c52b0acf.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1985.953554] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30e168de-ee13-404a-80a5-753daec97899 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.973415] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 1985.973415] env[62619]: value = "task-1778678" [ 1985.973415] env[62619]: _type = "Task" [ 1985.973415] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.984474] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778678, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.367675] env[62619]: DEBUG oslo_vmware.api [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778675, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.482957] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778678, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.868263] env[62619]: DEBUG oslo_vmware.api [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778675, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.984029] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778678, 'name': ReconfigVM_Task, 'duration_secs': 0.801381} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.984309] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Reconfigured VM instance instance-00000075 to attach disk [datastore1] e599f5ad-1b4d-4912-9b96-2544c52b0acf/e599f5ad-1b4d-4912-9b96-2544c52b0acf.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1986.984927] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-828027ed-92b5-4349-9cd8-10ae970a66a0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.990662] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 1986.990662] env[62619]: value = "task-1778679" [ 1986.990662] env[62619]: _type = "Task" [ 1986.990662] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.998365] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778679, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.368421] env[62619]: DEBUG oslo_vmware.api [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778675, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.501829] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778679, 'name': Rename_Task, 'duration_secs': 0.170079} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.502128] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1987.502398] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-633d5352-67e5-471f-8f2a-e37aacdb4397 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.508823] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 1987.508823] env[62619]: value = "task-1778680" [ 1987.508823] env[62619]: _type = "Task" [ 1987.508823] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.516645] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778680, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.868975] env[62619]: DEBUG oslo_vmware.api [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778675, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.018925] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778680, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.369597] env[62619]: DEBUG oslo_vmware.api [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778675, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.523512] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778680, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.874090] env[62619]: DEBUG oslo_vmware.api [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778675, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.020563] env[62619]: DEBUG oslo_vmware.api [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778680, 'name': PowerOnVM_Task, 'duration_secs': 1.511309} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.020818] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1989.021031] env[62619]: INFO nova.compute.manager [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Took 10.22 seconds to spawn the instance on the hypervisor. [ 1989.021205] env[62619]: DEBUG nova.compute.manager [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1989.022043] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6888148c-1603-4bf0-8afe-7a90cf3708d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.370796] env[62619]: DEBUG oslo_vmware.api [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778675, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.540868] env[62619]: INFO nova.compute.manager [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Took 14.95 seconds to build instance. [ 1989.871016] env[62619]: DEBUG oslo_vmware.api [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778675, 'name': ReconfigVM_Task, 'duration_secs': 5.878549} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.871307] env[62619]: DEBUG oslo_concurrency.lockutils [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1989.871515] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Reconfigured VM to detach interface {{(pid=62619) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1990.042747] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6838af02-5e23-4dfa-9141-bc27d7368bb5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.460s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.258123] env[62619]: DEBUG nova.compute.manager [req-138f473f-96ba-4b5f-b6ca-22a3ee6c9075 req-4d9e3f8b-7b55-459d-9b46-5d130a939de8 service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Received event network-changed-4e33b817-e043-4b75-92ae-40c8132fcc06 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1990.258123] env[62619]: DEBUG nova.compute.manager [req-138f473f-96ba-4b5f-b6ca-22a3ee6c9075 req-4d9e3f8b-7b55-459d-9b46-5d130a939de8 service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Refreshing instance network info cache due to event network-changed-4e33b817-e043-4b75-92ae-40c8132fcc06. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1990.258123] env[62619]: DEBUG oslo_concurrency.lockutils [req-138f473f-96ba-4b5f-b6ca-22a3ee6c9075 req-4d9e3f8b-7b55-459d-9b46-5d130a939de8 service nova] Acquiring lock "refresh_cache-e599f5ad-1b4d-4912-9b96-2544c52b0acf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1990.258123] env[62619]: DEBUG oslo_concurrency.lockutils [req-138f473f-96ba-4b5f-b6ca-22a3ee6c9075 req-4d9e3f8b-7b55-459d-9b46-5d130a939de8 service nova] Acquired lock "refresh_cache-e599f5ad-1b4d-4912-9b96-2544c52b0acf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1990.258123] env[62619]: DEBUG nova.network.neutron [req-138f473f-96ba-4b5f-b6ca-22a3ee6c9075 req-4d9e3f8b-7b55-459d-9b46-5d130a939de8 service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Refreshing network info cache for port 4e33b817-e043-4b75-92ae-40c8132fcc06 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1990.961681] env[62619]: DEBUG nova.network.neutron [req-138f473f-96ba-4b5f-b6ca-22a3ee6c9075 req-4d9e3f8b-7b55-459d-9b46-5d130a939de8 service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Updated VIF entry in instance network info cache for port 4e33b817-e043-4b75-92ae-40c8132fcc06. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1990.962064] env[62619]: DEBUG nova.network.neutron [req-138f473f-96ba-4b5f-b6ca-22a3ee6c9075 req-4d9e3f8b-7b55-459d-9b46-5d130a939de8 service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Updating instance_info_cache with network_info: [{"id": "4e33b817-e043-4b75-92ae-40c8132fcc06", "address": "fa:16:3e:bf:96:2b", "network": {"id": "0fdc1539-eb35-4283-99e8-fcc5a7a64110", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2082542751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ae04936c9b4ea19b5d7fac78c96ba4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e33b817-e0", "ovs_interfaceid": "4e33b817-e043-4b75-92ae-40c8132fcc06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1991.204098] env[62619]: DEBUG oslo_concurrency.lockutils [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1991.204286] env[62619]: DEBUG oslo_concurrency.lockutils [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquired lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1991.204457] env[62619]: DEBUG nova.network.neutron [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1991.464943] env[62619]: DEBUG oslo_concurrency.lockutils [req-138f473f-96ba-4b5f-b6ca-22a3ee6c9075 req-4d9e3f8b-7b55-459d-9b46-5d130a939de8 service nova] Releasing lock "refresh_cache-e599f5ad-1b4d-4912-9b96-2544c52b0acf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1991.700184] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.700487] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.700703] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.700900] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.701119] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1991.703400] env[62619]: INFO nova.compute.manager [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Terminating instance [ 1991.920729] env[62619]: INFO nova.network.neutron [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Port 80b186c1-7ddd-465a-9b4d-431a14224046 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
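The lock bookkeeping above ("Acquiring lock ... by ...", "acquired ... :: waited", "released ... :: held") is emitted by oslo.concurrency's lockutils wrappers around the terminate path. Below is a minimal, illustrative sketch of how such lines are produced, using the public lockutils.synchronized decorator and lockutils.lock context manager; the instance UUID is copied from the trace, while the function body is a placeholder rather than Nova's actual terminate_instance code.

# Illustrative only: reproduces the lockutils DEBUG pattern seen in this trace
# (the "inner" wrapper logs acquire/waited/held times, the "lock" context
# manager logs Acquiring/Acquired/Releasing). The function body is a stub.
from oslo_concurrency import lockutils

INSTANCE_UUID = "9e69e4ab-f219-4ba8-8f66-6063d8ab242a"  # uuid taken from the log


@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    # Nested, short-lived lock for the instance's pending events,
    # mirroring the "<uuid>-events" lock acquired and released above.
    with lockutils.lock(INSTANCE_UUID + "-events"):
        pass  # event cleanup would run here (placeholder)


do_terminate_instance()

With DEBUG logging enabled for oslo_concurrency, running this produces the same acquire/release pattern, with the waited/held durations measured by the wrapper.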
[ 1991.921128] env[62619]: DEBUG nova.network.neutron [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Updating instance_info_cache with network_info: [{"id": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "address": "fa:16:3e:8b:e2:fc", "network": {"id": "ed952a81-cb24-4b52-a137-9ceeefb896cf", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1902919029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d77e73a09d492695fbfe6ac2c93371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f48ead-6f", "ovs_interfaceid": "56f48ead-6f6e-451e-af3c-2634f9797c5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1992.207656] env[62619]: DEBUG nova.compute.manager [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1992.208157] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1992.208855] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de5f1aa-98c9-49df-a38f-8d83e63cabc1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.216405] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1992.216624] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa787032-d4c3-4c0a-ad7b-9ca96ff0b03e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.222291] env[62619]: DEBUG oslo_vmware.api [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1992.222291] env[62619]: value = "task-1778681" [ 1992.222291] env[62619]: _type = "Task" [ 1992.222291] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.230189] env[62619]: DEBUG oslo_vmware.api [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778681, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.423922] env[62619]: DEBUG oslo_concurrency.lockutils [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Releasing lock "refresh_cache-9e69e4ab-f219-4ba8-8f66-6063d8ab242a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1992.731548] env[62619]: DEBUG oslo_vmware.api [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778681, 'name': PowerOffVM_Task, 'duration_secs': 0.256971} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.731793] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1992.731919] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1992.732183] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ae8be65-7a34-4304-8ff8-b50161658f3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.844256] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1992.844496] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1992.844649] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Deleting the datastore file [datastore1] 9e69e4ab-f219-4ba8-8f66-6063d8ab242a {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1992.844914] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aedec6fe-b226-4824-8132-6b67420095e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.851794] env[62619]: DEBUG oslo_vmware.api [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1992.851794] env[62619]: value = "task-1778683" [ 1992.851794] env[62619]: _type = "Task" [ 1992.851794] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.858987] env[62619]: DEBUG oslo_vmware.api [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778683, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.928154] env[62619]: DEBUG oslo_concurrency.lockutils [None req-509ad7ee-fd37-40fb-9a0c-0ae3d450e0d9 tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "interface-9e69e4ab-f219-4ba8-8f66-6063d8ab242a-80b186c1-7ddd-465a-9b4d-431a14224046" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.650s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.362215] env[62619]: DEBUG oslo_vmware.api [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778683, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13643} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1993.362607] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1993.362652] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1993.362819] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1993.362984] env[62619]: INFO nova.compute.manager [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1993.363273] env[62619]: DEBUG oslo.service.loopingcall [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1993.363493] env[62619]: DEBUG nova.compute.manager [-] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1993.363608] env[62619]: DEBUG nova.network.neutron [-] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1993.974556] env[62619]: DEBUG nova.compute.manager [req-27dcb0ca-955c-4e7d-8089-92ab3ec774ce req-33357eb3-b2ef-4b2d-a41f-de555854e6d1 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Received event network-vif-deleted-56f48ead-6f6e-451e-af3c-2634f9797c5e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1993.974708] env[62619]: INFO nova.compute.manager [req-27dcb0ca-955c-4e7d-8089-92ab3ec774ce req-33357eb3-b2ef-4b2d-a41f-de555854e6d1 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Neutron deleted interface 56f48ead-6f6e-451e-af3c-2634f9797c5e; detaching it from the instance and deleting it from the info cache [ 1993.974919] env[62619]: DEBUG nova.network.neutron [req-27dcb0ca-955c-4e7d-8089-92ab3ec774ce req-33357eb3-b2ef-4b2d-a41f-de555854e6d1 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1994.453317] env[62619]: DEBUG nova.network.neutron [-] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1994.477781] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55da4702-9db1-4cda-a2cd-01608c8ae31c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.487332] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251678ef-cbef-4b43-8e21-2443b44c18bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.516114] env[62619]: DEBUG nova.compute.manager [req-27dcb0ca-955c-4e7d-8089-92ab3ec774ce req-33357eb3-b2ef-4b2d-a41f-de555854e6d1 service nova] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Detach interface failed, port_id=56f48ead-6f6e-451e-af3c-2634f9797c5e, reason: Instance 9e69e4ab-f219-4ba8-8f66-6063d8ab242a could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1994.955888] env[62619]: INFO nova.compute.manager [-] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Took 1.59 seconds to deallocate network for instance. 
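Each of the *_Task operations in this trace (ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task) follows the same invoke-then-poll pattern: the driver starts an asynchronous vSphere task and then blocks in oslo.vmware's wait_for_task(), which polls the task and emits the "progress is N%" and "completed successfully" entries. A minimal sketch of that pattern is below, assuming an already-established oslo_vmware.api.VMwareAPISession named session and a VM managed-object reference vm_ref (both placeholders, not values from this log).

def power_off_vm(session, vm_ref):
    """Illustrative sketch of the invoke-then-poll pattern in this trace.

    `session` is assumed to be an established oslo_vmware.api.VMwareAPISession
    and `vm_ref` the target VM's managed object reference; neither is taken
    from the log above.
    """
    # Start the asynchronous vSphere task; the call returns a Task moref
    # immediately (the "Invoking VirtualMachine.PowerOffVM_Task" line).
    task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
    # Poll the task (the _poll_task entries) until it succeeds; an
    # oslo.vmware exception is raised if the task ends in error.
    return session.wait_for_task(task)

UnregisterVM, by contrast, is a synchronous vSphere call, which is why no task-polling lines follow its invocation in the log.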
[ 1995.462844] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1995.463173] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.463438] env[62619]: DEBUG nova.objects.instance [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'resources' on Instance uuid 9e69e4ab-f219-4ba8-8f66-6063d8ab242a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1995.932896] env[62619]: DEBUG oslo_concurrency.lockutils [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1995.933172] env[62619]: DEBUG oslo_concurrency.lockutils [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.933385] env[62619]: DEBUG oslo_concurrency.lockutils [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1995.933565] env[62619]: DEBUG oslo_concurrency.lockutils [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.933735] env[62619]: DEBUG oslo_concurrency.lockutils [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1995.935890] env[62619]: INFO nova.compute.manager [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Terminating instance [ 1996.064523] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9b6be8-6ba3-4df6-b704-a4ef6872767b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.072586] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244f9d9e-638b-4e3e-9dfd-56afe684b4a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.102693] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63387f19-2e7d-401a-9291-e6de77d67d64 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.109668] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32db6ad9-446a-433f-a4cf-a55319f1ab10 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.122863] env[62619]: DEBUG nova.compute.provider_tree [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1996.439442] env[62619]: DEBUG nova.compute.manager [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1996.439692] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1996.440660] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b1610f-0177-4f22-9cb2-14d77ae6168c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.449396] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1996.449661] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ae676ce-fe83-47c8-9bc0-4c7961597c66 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.455916] env[62619]: DEBUG oslo_vmware.api [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1996.455916] env[62619]: value = "task-1778684" [ 1996.455916] env[62619]: _type = "Task" [ 1996.455916] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.463970] env[62619]: DEBUG oslo_vmware.api [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778684, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.625847] env[62619]: DEBUG nova.scheduler.client.report [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1996.967823] env[62619]: DEBUG oslo_vmware.api [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778684, 'name': PowerOffVM_Task, 'duration_secs': 0.155099} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.968054] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1996.968266] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1996.968529] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30bad183-81a5-4d0d-b431-b6a881afd344 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.082652] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1997.082884] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1997.083081] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleting the datastore file [datastore1] 13eeb4aa-0f20-4aed-9453-66afb0ff1152 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1997.083356] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d6317c8-dc2e-4de9-a984-7489d67c02bb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.089757] env[62619]: DEBUG oslo_vmware.api [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for the task: (returnval){ [ 1997.089757] env[62619]: value = "task-1778686" [ 1997.089757] env[62619]: _type = "Task" [ 1997.089757] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.097459] env[62619]: DEBUG oslo_vmware.api [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778686, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.130332] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.667s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.152691] env[62619]: INFO nova.scheduler.client.report [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Deleted allocations for instance 9e69e4ab-f219-4ba8-8f66-6063d8ab242a [ 1997.600632] env[62619]: DEBUG oslo_vmware.api [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Task: {'id': task-1778686, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195864} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.600632] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1997.600632] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1997.600632] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1997.601175] env[62619]: INFO nova.compute.manager [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1997.601175] env[62619]: DEBUG oslo.service.loopingcall [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1997.601244] env[62619]: DEBUG nova.compute.manager [-] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1997.601315] env[62619]: DEBUG nova.network.neutron [-] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1997.663194] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38fc222d-fdc9-49c0-998f-6ebf7e1a07fd tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "9e69e4ab-f219-4ba8-8f66-6063d8ab242a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.962s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1998.091357] env[62619]: DEBUG nova.compute.manager [req-6b0f193c-13f6-432f-a017-7f63f75f7c99 req-d748f5f2-79e4-4660-9a95-434d73055651 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Received event network-vif-deleted-b52c0c61-cdaa-4ec8-b935-3229b930c548 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1998.091581] env[62619]: INFO nova.compute.manager [req-6b0f193c-13f6-432f-a017-7f63f75f7c99 req-d748f5f2-79e4-4660-9a95-434d73055651 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Neutron deleted interface b52c0c61-cdaa-4ec8-b935-3229b930c548; detaching it from the instance and deleting it from the info cache [ 1998.091714] env[62619]: DEBUG nova.network.neutron [req-6b0f193c-13f6-432f-a017-7f63f75f7c99 req-d748f5f2-79e4-4660-9a95-434d73055651 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1998.274663] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.274942] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.275170] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.275352] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f 
tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.275533] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1998.278214] env[62619]: INFO nova.compute.manager [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Terminating instance [ 1998.568392] env[62619]: DEBUG nova.network.neutron [-] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1998.594694] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0db1c493-be43-41e8-9dfe-1ebcb4652240 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.604261] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cd53ff-51a2-4f3c-86be-a19137e59763 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.631584] env[62619]: DEBUG nova.compute.manager [req-6b0f193c-13f6-432f-a017-7f63f75f7c99 req-d748f5f2-79e4-4660-9a95-434d73055651 service nova] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Detach interface failed, port_id=b52c0c61-cdaa-4ec8-b935-3229b930c548, reason: Instance 13eeb4aa-0f20-4aed-9453-66afb0ff1152 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1998.709027] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1998.709146] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1998.781917] env[62619]: DEBUG nova.compute.manager [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1998.782321] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1998.783691] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c835557b-3aeb-426c-bbc7-163fc41aa494 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.791496] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1998.791736] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6edad27-ab13-4968-b7d6-f94b1f085fba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.797097] env[62619]: DEBUG oslo_vmware.api [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1998.797097] env[62619]: value = "task-1778687" [ 1998.797097] env[62619]: _type = "Task" [ 1998.797097] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.806086] env[62619]: DEBUG oslo_vmware.api [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778687, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.071928] env[62619]: INFO nova.compute.manager [-] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Took 1.47 seconds to deallocate network for instance. [ 1999.306487] env[62619]: DEBUG oslo_vmware.api [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778687, 'name': PowerOffVM_Task, 'duration_secs': 0.206781} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.306749] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1999.306915] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1999.307170] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98869d1a-3ff0-44a0-a06c-34c44ed347ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.377322] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1999.377498] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1999.377675] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Deleting the datastore file [datastore1] a2064f8f-b928-44c0-86d8-c0bb9882dbde {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1999.377930] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6fe84b60-0520-4225-8493-24c65cecd461 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.383863] env[62619]: DEBUG oslo_vmware.api [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for the task: (returnval){ [ 1999.383863] env[62619]: value = "task-1778689" [ 1999.383863] env[62619]: _type = "Task" [ 1999.383863] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.391535] env[62619]: DEBUG oslo_vmware.api [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778689, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.578179] env[62619]: DEBUG oslo_concurrency.lockutils [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1999.578476] env[62619]: DEBUG oslo_concurrency.lockutils [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1999.578706] env[62619]: DEBUG nova.objects.instance [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lazy-loading 'resources' on Instance uuid 13eeb4aa-0f20-4aed-9453-66afb0ff1152 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1999.894474] env[62619]: DEBUG oslo_vmware.api [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Task: {'id': task-1778689, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087198} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.894769] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1999.894965] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1999.895159] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1999.895343] env[62619]: INFO nova.compute.manager [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1999.895577] env[62619]: DEBUG oslo.service.loopingcall [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1999.895766] env[62619]: DEBUG nova.compute.manager [-] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1999.895857] env[62619]: DEBUG nova.network.neutron [-] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2000.143149] env[62619]: DEBUG nova.compute.manager [req-f811f5f6-4293-46fa-a3a3-c6a626efbd25 req-f5dc2c58-6902-4ba3-8dd7-f373e3d2cf96 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Received event network-vif-deleted-35375282-c697-45eb-a87c-d85555a9012b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2000.143376] env[62619]: INFO nova.compute.manager [req-f811f5f6-4293-46fa-a3a3-c6a626efbd25 req-f5dc2c58-6902-4ba3-8dd7-f373e3d2cf96 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Neutron deleted interface 35375282-c697-45eb-a87c-d85555a9012b; detaching it from the instance and deleting it from the info cache [ 2000.143548] env[62619]: DEBUG nova.network.neutron [req-f811f5f6-4293-46fa-a3a3-c6a626efbd25 req-f5dc2c58-6902-4ba3-8dd7-f373e3d2cf96 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2000.178692] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dcb6be9-550f-4dc9-9d4e-002417f6deea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.186842] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861e8afa-f6b8-4249-8dfd-301f98877901 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.217364] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57797939-4ef1-4c3a-9d94-fee6d1efdab0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.224984] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9baaedb2-2f3a-49ac-b9d3-f472c94b78f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.237981] env[62619]: DEBUG nova.compute.provider_tree [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2000.623188] env[62619]: DEBUG nova.network.neutron [-] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2000.648152] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-708c4f1c-cd71-4582-b075-27150b25d971 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.657563] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa45a6c2-97d1-4a2a-b43d-fbff2254f09f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.684830] env[62619]: DEBUG nova.compute.manager [req-f811f5f6-4293-46fa-a3a3-c6a626efbd25 req-f5dc2c58-6902-4ba3-8dd7-f373e3d2cf96 service nova] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Detach interface failed, port_id=35375282-c697-45eb-a87c-d85555a9012b, reason: Instance a2064f8f-b928-44c0-86d8-c0bb9882dbde could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2000.741349] env[62619]: DEBUG nova.scheduler.client.report [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2001.126218] env[62619]: INFO nova.compute.manager [-] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Took 1.23 seconds to deallocate network for instance. [ 2001.245940] env[62619]: DEBUG oslo_concurrency.lockutils [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.667s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2001.267198] env[62619]: INFO nova.scheduler.client.report [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Deleted allocations for instance 13eeb4aa-0f20-4aed-9453-66afb0ff1152 [ 2001.632867] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2001.633225] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2001.633514] env[62619]: DEBUG nova.objects.instance [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lazy-loading 'resources' on Instance uuid 
a2064f8f-b928-44c0-86d8-c0bb9882dbde {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2001.774532] env[62619]: DEBUG oslo_concurrency.lockutils [None req-125881e8-bf3a-4466-b04a-95fbc464a7ad tempest-AttachVolumeShelveTestJSON-739994717 tempest-AttachVolumeShelveTestJSON-739994717-project-member] Lock "13eeb4aa-0f20-4aed-9453-66afb0ff1152" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.841s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.221608] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9e497a-f622-4e80-b72e-58958a9a048b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.229572] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37652101-8e72-4bcb-98f4-8a384ea1b841 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.259828] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79508209-cff0-422e-adc7-9ea926be4e15 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.267423] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37addb6b-dddb-480a-8edf-0ce7a8caecd1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.282352] env[62619]: DEBUG nova.compute.provider_tree [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2002.709162] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2002.785699] env[62619]: DEBUG nova.scheduler.client.report [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2003.290790] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.657s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2003.312337] env[62619]: INFO nova.scheduler.client.report [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Deleted allocations for instance a2064f8f-b928-44c0-86d8-c0bb9882dbde [ 2003.709088] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2003.709362] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2003.820036] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c4e97f92-62e1-4d65-a6fa-b7de9792394f tempest-AttachInterfacesTestJSON-654412364 tempest-AttachInterfacesTestJSON-654412364-project-member] Lock "a2064f8f-b928-44c0-86d8-c0bb9882dbde" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.545s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2004.705181] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2005.211370] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2005.714723] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.715022] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.715022] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.715114] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2005.716285] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b008c18-bb68-41a2-b832-94d5203b0020 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.725295] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f708d7b-92ab-4e74-9ede-88fe431265bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.738255] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5602b74c-241a-4dab-9062-26dd520ca9e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.744828] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eed997a-fc05-4288-8a36-3f40406a3c97 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.775823] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180378MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2005.775984] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.776196] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2006.806717] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4c66bbdf-af6a-4705-8219-85cf19f8314e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2006.806951] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance cec0ea75-042d-4ee5-91d5-cad86456ab97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2006.806995] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 0cde512e-a9f8-4f9b-9e08-41494090e314 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2006.807130] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e599f5ad-1b4d-4912-9b96-2544c52b0acf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2006.807319] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2006.807497] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2006.866780] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b45b5f-aba4-4af9-be81-780c24ebed23 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.874535] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2eb485-38c9-40ab-856e-f9a1a9348d97 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.904209] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63f3187-50cc-40eb-8dcf-dff2f2ef6a8d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.910671] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-befd6b2f-89ef-43d1-a830-806bb6926211 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.923469] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2007.426550] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2007.934803] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2007.934803] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.157s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.431646] env[62619]: DEBUG oslo_service.periodic_task [None 
req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2008.432147] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2008.432281] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 2008.432402] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 2008.969403] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2008.969635] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquired lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2008.969683] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Forcefully refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2008.969833] env[62619]: DEBUG nova.objects.instance [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lazy-loading 'info_cache' on Instance uuid 4c66bbdf-af6a-4705-8219-85cf19f8314e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2009.304904] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2009.305140] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2009.807366] env[62619]: DEBUG nova.compute.manager [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2010.329243] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.329526] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2010.331074] env[62619]: INFO nova.compute.claims [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2010.685031] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance_info_cache with network_info: [{"id": "b1a6212d-63f4-4343-9100-d88707a89c10", "address": "fa:16:3e:48:b2:0f", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a6212d-63", "ovs_interfaceid": "b1a6212d-63f4-4343-9100-d88707a89c10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2011.187125] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Releasing lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2011.187345] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 2011.187559] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task 
ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2011.187719] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2011.406544] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c576480f-71ab-40c2-9fed-9351cd37779f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.414017] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12f0ba7-f867-4dd4-8154-ed5544b54b05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.443666] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821a68ee-cc59-4f4d-92a8-3b2f13263700 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.450846] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf3c026f-96b9-420a-b0d8-0265909a7e64 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.463629] env[62619]: DEBUG nova.compute.provider_tree [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2011.966842] env[62619]: DEBUG nova.scheduler.client.report [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2012.472141] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.142s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.472825] env[62619]: DEBUG nova.compute.manager [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2012.977574] env[62619]: DEBUG nova.compute.utils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2012.979031] env[62619]: DEBUG nova.compute.manager [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2012.979202] env[62619]: DEBUG nova.network.neutron [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2013.019118] env[62619]: DEBUG nova.policy [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '960fb31864a64d15a0ef3833e622dacb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f078f46880b423dae58d87e31f291d3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 2013.253965] env[62619]: DEBUG nova.network.neutron [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Successfully created port: 5186dbd9-28b8-4b21-91a1-4cf5100781e6 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2013.482749] env[62619]: DEBUG nova.compute.manager [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2014.493269] env[62619]: DEBUG nova.compute.manager [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2014.521459] env[62619]: DEBUG nova.virt.hardware [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2014.521731] env[62619]: DEBUG nova.virt.hardware [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2014.521886] env[62619]: DEBUG nova.virt.hardware [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2014.522078] env[62619]: DEBUG nova.virt.hardware [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2014.522225] env[62619]: DEBUG nova.virt.hardware [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2014.522368] env[62619]: DEBUG nova.virt.hardware [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2014.522572] env[62619]: DEBUG nova.virt.hardware [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2014.522730] env[62619]: DEBUG nova.virt.hardware [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2014.522890] env[62619]: DEBUG nova.virt.hardware [None 
req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2014.523074] env[62619]: DEBUG nova.virt.hardware [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2014.523245] env[62619]: DEBUG nova.virt.hardware [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2014.524134] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62fd503-4133-4c9c-9876-f4dcbc0b02aa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.532328] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22aaaaaf-ec46-4f46-ad1f-c845fc69636a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.621429] env[62619]: DEBUG nova.compute.manager [req-0ede3cd8-1448-4d35-9e73-e728c873e288 req-04e5eec2-8920-4306-9439-70746e426942 service nova] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Received event network-vif-plugged-5186dbd9-28b8-4b21-91a1-4cf5100781e6 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2014.621654] env[62619]: DEBUG oslo_concurrency.lockutils [req-0ede3cd8-1448-4d35-9e73-e728c873e288 req-04e5eec2-8920-4306-9439-70746e426942 service nova] Acquiring lock "dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.621861] env[62619]: DEBUG oslo_concurrency.lockutils [req-0ede3cd8-1448-4d35-9e73-e728c873e288 req-04e5eec2-8920-4306-9439-70746e426942 service nova] Lock "dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.622130] env[62619]: DEBUG oslo_concurrency.lockutils [req-0ede3cd8-1448-4d35-9e73-e728c873e288 req-04e5eec2-8920-4306-9439-70746e426942 service nova] Lock "dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.622337] env[62619]: DEBUG nova.compute.manager [req-0ede3cd8-1448-4d35-9e73-e728c873e288 req-04e5eec2-8920-4306-9439-70746e426942 service nova] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] No waiting events found dispatching network-vif-plugged-5186dbd9-28b8-4b21-91a1-4cf5100781e6 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2014.622531] env[62619]: WARNING nova.compute.manager [req-0ede3cd8-1448-4d35-9e73-e728c873e288 
req-04e5eec2-8920-4306-9439-70746e426942 service nova] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Received unexpected event network-vif-plugged-5186dbd9-28b8-4b21-91a1-4cf5100781e6 for instance with vm_state building and task_state spawning. [ 2014.706064] env[62619]: DEBUG nova.network.neutron [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Successfully updated port: 5186dbd9-28b8-4b21-91a1-4cf5100781e6 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2015.211491] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "refresh_cache-dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2015.211715] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquired lock "refresh_cache-dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2015.211992] env[62619]: DEBUG nova.network.neutron [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2015.745519] env[62619]: DEBUG nova.network.neutron [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2015.857288] env[62619]: DEBUG nova.network.neutron [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Updating instance_info_cache with network_info: [{"id": "5186dbd9-28b8-4b21-91a1-4cf5100781e6", "address": "fa:16:3e:f1:74:e6", "network": {"id": "50c6d508-1bcb-4435-b59e-4994f7ce32c0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1862625551-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3f078f46880b423dae58d87e31f291d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5186dbd9-28", "ovs_interfaceid": "5186dbd9-28b8-4b21-91a1-4cf5100781e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2016.360253] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Releasing lock "refresh_cache-dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2016.360620] env[62619]: DEBUG nova.compute.manager [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Instance network_info: |[{"id": "5186dbd9-28b8-4b21-91a1-4cf5100781e6", "address": "fa:16:3e:f1:74:e6", "network": {"id": "50c6d508-1bcb-4435-b59e-4994f7ce32c0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1862625551-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3f078f46880b423dae58d87e31f291d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5186dbd9-28", "ovs_interfaceid": "5186dbd9-28b8-4b21-91a1-4cf5100781e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2016.361078] env[62619]: DEBUG 
nova.virt.vmwareapi.vmops [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:74:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f17856cf-7248-414b-bde6-8c90cfb4c593', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5186dbd9-28b8-4b21-91a1-4cf5100781e6', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2016.368391] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Creating folder: Project (3f078f46880b423dae58d87e31f291d3). Parent ref: group-v368875. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2016.368657] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51b27c65-2848-4b65-8dd3-7b5ef7324d19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.379411] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Created folder: Project (3f078f46880b423dae58d87e31f291d3) in parent group-v368875. [ 2016.379581] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Creating folder: Instances. Parent ref: group-v369197. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2016.379794] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-026da3b0-a8a9-4e17-9a73-fd7d46ad986e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.388656] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Created folder: Instances in parent group-v369197. [ 2016.388870] env[62619]: DEBUG oslo.service.loopingcall [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2016.389055] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2016.389239] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1b8c454-a2b1-4490-836f-7a349cbb1177 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.407620] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2016.407620] env[62619]: value = "task-1778693" [ 2016.407620] env[62619]: _type = "Task" [ 2016.407620] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.414864] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778693, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.648417] env[62619]: DEBUG nova.compute.manager [req-8438768d-d77b-40a3-8a77-8eea0989297e req-dc1c8ae7-77ba-4a32-b1c2-3d62dd7300f4 service nova] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Received event network-changed-5186dbd9-28b8-4b21-91a1-4cf5100781e6 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2016.648597] env[62619]: DEBUG nova.compute.manager [req-8438768d-d77b-40a3-8a77-8eea0989297e req-dc1c8ae7-77ba-4a32-b1c2-3d62dd7300f4 service nova] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Refreshing instance network info cache due to event network-changed-5186dbd9-28b8-4b21-91a1-4cf5100781e6. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2016.648838] env[62619]: DEBUG oslo_concurrency.lockutils [req-8438768d-d77b-40a3-8a77-8eea0989297e req-dc1c8ae7-77ba-4a32-b1c2-3d62dd7300f4 service nova] Acquiring lock "refresh_cache-dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2016.648993] env[62619]: DEBUG oslo_concurrency.lockutils [req-8438768d-d77b-40a3-8a77-8eea0989297e req-dc1c8ae7-77ba-4a32-b1c2-3d62dd7300f4 service nova] Acquired lock "refresh_cache-dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2016.649190] env[62619]: DEBUG nova.network.neutron [req-8438768d-d77b-40a3-8a77-8eea0989297e req-dc1c8ae7-77ba-4a32-b1c2-3d62dd7300f4 service nova] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Refreshing network info cache for port 5186dbd9-28b8-4b21-91a1-4cf5100781e6 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2016.918058] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778693, 'name': CreateVM_Task, 'duration_secs': 0.364641} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.918058] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2016.918539] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2016.918673] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2016.918992] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2016.919252] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-381ee890-8228-4487-baeb-84c320714e2f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.923990] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2016.923990] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a63060-780d-a40a-32fe-2f5c9f464f1b" [ 2016.923990] env[62619]: _type = "Task" [ 2016.923990] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.931288] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a63060-780d-a40a-32fe-2f5c9f464f1b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.306463] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.306463] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.306695] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "cec0ea75-042d-4ee5-91d5-cad86456ab97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.306695] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.306885] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.309051] env[62619]: INFO nova.compute.manager [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Terminating instance [ 2017.329566] env[62619]: DEBUG nova.network.neutron [req-8438768d-d77b-40a3-8a77-8eea0989297e req-dc1c8ae7-77ba-4a32-b1c2-3d62dd7300f4 service nova] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Updated VIF entry in instance network info cache for port 5186dbd9-28b8-4b21-91a1-4cf5100781e6. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2017.329893] env[62619]: DEBUG nova.network.neutron [req-8438768d-d77b-40a3-8a77-8eea0989297e req-dc1c8ae7-77ba-4a32-b1c2-3d62dd7300f4 service nova] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Updating instance_info_cache with network_info: [{"id": "5186dbd9-28b8-4b21-91a1-4cf5100781e6", "address": "fa:16:3e:f1:74:e6", "network": {"id": "50c6d508-1bcb-4435-b59e-4994f7ce32c0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1862625551-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3f078f46880b423dae58d87e31f291d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5186dbd9-28", "ovs_interfaceid": "5186dbd9-28b8-4b21-91a1-4cf5100781e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2017.434629] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a63060-780d-a40a-32fe-2f5c9f464f1b, 'name': SearchDatastore_Task, 'duration_secs': 0.010481} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.434926] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2017.435179] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2017.435408] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2017.435554] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2017.435726] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2017.435977] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63adefca-3f71-415b-aaec-a91d9d5ae32d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.443909] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2017.444088] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2017.444732] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01091186-e582-44fb-9f55-5929950d3d82 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.449733] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2017.449733] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5296aa54-8172-c8bf-1e88-f7c713e302e0" [ 2017.449733] env[62619]: _type = "Task" [ 2017.449733] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.456901] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5296aa54-8172-c8bf-1e88-f7c713e302e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.814396] env[62619]: DEBUG nova.compute.manager [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2017.814652] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2017.815558] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1624f44e-449e-4e2c-acad-9a595fdadf06 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.823550] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2017.823764] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad69eaf9-714d-40d7-9050-4c26cdc7a525 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.829796] env[62619]: DEBUG oslo_vmware.api [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2017.829796] env[62619]: value = "task-1778694" [ 2017.829796] env[62619]: _type = "Task" [ 2017.829796] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.832854] env[62619]: DEBUG oslo_concurrency.lockutils [req-8438768d-d77b-40a3-8a77-8eea0989297e req-dc1c8ae7-77ba-4a32-b1c2-3d62dd7300f4 service nova] Releasing lock "refresh_cache-dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2017.837459] env[62619]: DEBUG oslo_vmware.api [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778694, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.961741] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5296aa54-8172-c8bf-1e88-f7c713e302e0, 'name': SearchDatastore_Task, 'duration_secs': 0.008301} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.962304] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-216f6d15-4b83-42e5-9319-c87280523305 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.967675] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2017.967675] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52eca7cc-156c-c431-75b6-2deb3a61ed48" [ 2017.967675] env[62619]: _type = "Task" [ 2017.967675] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.975286] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52eca7cc-156c-c431-75b6-2deb3a61ed48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.339753] env[62619]: DEBUG oslo_vmware.api [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778694, 'name': PowerOffVM_Task, 'duration_secs': 0.224007} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.340017] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2018.340194] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2018.340459] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-120bf36c-324d-4d0c-a95f-85f0784b2668 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.463571] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2018.463820] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2018.464023] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Deleting the datastore file [datastore1] cec0ea75-042d-4ee5-91d5-cad86456ab97 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2018.464237] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca510262-d69f-4a2c-8ab9-b16a12ff4b2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.471630] env[62619]: DEBUG oslo_vmware.api [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2018.471630] env[62619]: value = "task-1778696" [ 2018.471630] env[62619]: _type = "Task" [ 2018.471630] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.478576] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52eca7cc-156c-c431-75b6-2deb3a61ed48, 'name': SearchDatastore_Task, 'duration_secs': 0.009336} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.479232] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2018.479534] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc/dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2018.479872] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ad65727-467a-47f8-9bf2-726dcce4835a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.484824] env[62619]: DEBUG oslo_vmware.api [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778696, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.489222] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2018.489222] env[62619]: value = "task-1778697" [ 2018.489222] env[62619]: _type = "Task" [ 2018.489222] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.496940] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778697, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.996069] env[62619]: DEBUG oslo_vmware.api [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778696, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161267} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.005286] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2019.005513] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2019.006304] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2019.006489] env[62619]: INFO nova.compute.manager [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Took 1.19 seconds to destroy the instance on the hypervisor. [ 2019.006801] env[62619]: DEBUG oslo.service.loopingcall [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2019.007975] env[62619]: DEBUG nova.compute.manager [-] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2019.008083] env[62619]: DEBUG nova.network.neutron [-] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2019.018993] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778697, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478967} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.019477] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc/dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2019.019714] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2019.020137] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97cff7e6-d98d-4108-92ab-92783c731c8c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.030179] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2019.030179] env[62619]: value = "task-1778698" [ 2019.030179] env[62619]: _type = "Task" [ 2019.030179] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.038158] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778698, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.195948] env[62619]: INFO nova.compute.manager [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Rebuilding instance [ 2019.242809] env[62619]: DEBUG nova.compute.manager [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2019.243685] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32ca582-eb91-48e5-9167-e6b0fb0a7ea4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.490163] env[62619]: DEBUG nova.compute.manager [req-91af1672-1b1e-449d-878b-511048c2514a req-99d6d3e2-4291-46ba-8d39-56600c8bfb82 service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Received event network-vif-deleted-0de99671-66a1-4b86-9417-2955fdf1dcba {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2019.491317] env[62619]: INFO nova.compute.manager [req-91af1672-1b1e-449d-878b-511048c2514a req-99d6d3e2-4291-46ba-8d39-56600c8bfb82 service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Neutron deleted interface 0de99671-66a1-4b86-9417-2955fdf1dcba; detaching it from the instance and deleting it from the info cache [ 2019.491596] env[62619]: DEBUG nova.network.neutron [req-91af1672-1b1e-449d-878b-511048c2514a req-99d6d3e2-4291-46ba-8d39-56600c8bfb82 service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.542636] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778698, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120299} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.542636] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2019.542636] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb9ba44-e792-4c0d-b474-b115319c670a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.567222] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc/dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2019.567222] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0157901-b0ae-4c48-b97d-36a0e3a232f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.585464] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2019.585464] env[62619]: value = "task-1778699" [ 2019.585464] env[62619]: _type = "Task" [ 2019.585464] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.593559] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778699, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.965168] env[62619]: DEBUG nova.network.neutron [-] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.994589] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cbbd7531-08e6-406d-9a79-85509e25e710 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.004747] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a919d0-f31d-4cd1-8145-61db5f1de070 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.033837] env[62619]: DEBUG nova.compute.manager [req-91af1672-1b1e-449d-878b-511048c2514a req-99d6d3e2-4291-46ba-8d39-56600c8bfb82 service nova] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Detach interface failed, port_id=0de99671-66a1-4b86-9417-2955fdf1dcba, reason: Instance cec0ea75-042d-4ee5-91d5-cad86456ab97 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2020.095276] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778699, 'name': ReconfigVM_Task, 'duration_secs': 0.281708} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.095578] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Reconfigured VM instance instance-00000076 to attach disk [datastore1] dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc/dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2020.096207] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2513e68-cc46-4f04-ba65-4a274f567147 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.103346] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2020.103346] env[62619]: value = "task-1778700" [ 2020.103346] env[62619]: _type = "Task" [ 2020.103346] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.111758] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778700, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.257758] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2020.258171] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-451ed660-2af2-44be-bca4-708a0e7b65cb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.265603] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2020.265603] env[62619]: value = "task-1778701" [ 2020.265603] env[62619]: _type = "Task" [ 2020.265603] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.273950] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778701, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.468028] env[62619]: INFO nova.compute.manager [-] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Took 1.46 seconds to deallocate network for instance. [ 2020.613454] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778700, 'name': Rename_Task, 'duration_secs': 0.145164} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.613743] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2020.613964] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-159d4d04-090f-426d-9c27-72696a808af2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.620504] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2020.620504] env[62619]: value = "task-1778702" [ 2020.620504] env[62619]: _type = "Task" [ 2020.620504] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.627598] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778702, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.775383] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778701, 'name': PowerOffVM_Task, 'duration_secs': 0.190263} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.775643] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2020.775873] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2020.776620] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3f5400-0df7-47b3-82b6-ca71f3bee539 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.783081] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2020.783287] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a024705-c4ec-4773-8b68-f767353e3d69 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.863694] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2020.863882] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2020.864073] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Deleting the datastore file [datastore1] 0cde512e-a9f8-4f9b-9e08-41494090e314 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2020.864348] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61bd1eb6-e07c-4350-8bb0-02ac520d9b11 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.871311] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2020.871311] env[62619]: value = "task-1778704" [ 2020.871311] env[62619]: _type = "Task" [ 2020.871311] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.878988] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778704, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.975457] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2020.975776] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2020.976048] env[62619]: DEBUG nova.objects.instance [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lazy-loading 'resources' on Instance uuid cec0ea75-042d-4ee5-91d5-cad86456ab97 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2021.129669] env[62619]: DEBUG oslo_vmware.api [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778702, 'name': PowerOnVM_Task, 'duration_secs': 0.455316} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.130056] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2021.130111] env[62619]: INFO nova.compute.manager [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Took 6.64 seconds to spawn the instance on the hypervisor. 
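Editor's note: the entries above repeatedly show the same pattern: a vCenter task (CreateVM_Task, SearchDatastore_Task, PowerOffVM_Task, PowerOnVM_Task, ...) is submitted, then "Waiting for the task" is logged and the task is polled roughly every half second ("progress is 0%", then "completed successfully" with a duration). The sketch below is a simplified, self-contained re-implementation of that poll loop for illustration only; it is not the oslo.vmware code referenced in the log (oslo_vmware/api.py), and the names PollError, FakeTask and poll_interval are hypothetical.

# Illustrative sketch of the wait_for_task / _poll_task pattern seen above.
# Assumptions: get_task_info() returns a dict with 'state', 'progress' and
# optionally 'result' / 'error', loosely mirroring the TaskInfo fields the
# log reports. Not the actual oslo.vmware implementation.

import time


class PollError(Exception):
    """Raised when a polled task finishes in an error state."""


def wait_for_task(get_task_info, poll_interval=0.5, log=print):
    """Poll a task until it reaches a terminal state and return its result."""
    while True:
        info = get_task_info()
        state = info["state"]
        if state == "running":
            # The log shows progress being reported on each poll cycle.
            log("progress is %s%%" % info.get("progress", 0))
            time.sleep(poll_interval)  # the log's poll cadence is ~0.5s
            continue
        if state == "success":
            log("completed successfully")
            return info.get("result")
        raise PollError(info.get("error", "task failed"))


# Minimal usage example: a fake task that succeeds on the third poll.
class FakeTask:
    def __init__(self):
        self._polls = 0

    def info(self):
        self._polls += 1
        if self._polls < 3:
            return {"state": "running", "progress": 30 * self._polls}
        return {"state": "success", "result": "vm-123"}


if __name__ == "__main__":
    print("task result:", wait_for_task(FakeTask().info))

In the real service the loop also enforces retry limits and wraps polling in an oslo.service looping call, which is why each completed task line in the log carries a 'duration_secs' value.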
[ 2021.130248] env[62619]: DEBUG nova.compute.manager [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2021.131049] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d620b13-5c0c-4703-80fc-2146f8842347 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.383113] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778704, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142189} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.383113] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2021.383113] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2021.383113] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2021.548128] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056976f7-c9b4-4536-950d-222e849c2b75 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.555788] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df629c8-9df7-4bb8-9be5-42b34605b76e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.586226] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f460ce0b-4db0-4336-8341-6b793dd16758 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.592959] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab9e52b-813b-4b3e-8ac0-4da0c619e6fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.605338] env[62619]: DEBUG nova.compute.provider_tree [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2021.648485] env[62619]: 
INFO nova.compute.manager [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Took 11.34 seconds to build instance. [ 2022.107919] env[62619]: DEBUG nova.scheduler.client.report [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2022.151861] env[62619]: DEBUG oslo_concurrency.lockutils [None req-decba383-cf8e-48b7-9124-37a4ca328888 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.846s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2022.424623] env[62619]: DEBUG nova.virt.hardware [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2022.424860] env[62619]: DEBUG nova.virt.hardware [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2022.425028] env[62619]: DEBUG nova.virt.hardware [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2022.425345] env[62619]: DEBUG nova.virt.hardware [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2022.425345] env[62619]: DEBUG nova.virt.hardware [None 
req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2022.425529] env[62619]: DEBUG nova.virt.hardware [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2022.425798] env[62619]: DEBUG nova.virt.hardware [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2022.425963] env[62619]: DEBUG nova.virt.hardware [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2022.426149] env[62619]: DEBUG nova.virt.hardware [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2022.426310] env[62619]: DEBUG nova.virt.hardware [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2022.426478] env[62619]: DEBUG nova.virt.hardware [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2022.427373] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2066e55f-f4f3-479f-a57e-ab385cc5c2c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.435316] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8ed478-ee39-48c7-a894-cd81d0ab8fb6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.449608] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:8b:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cebc48c-6a77-46bf-9c12-ac130e4d7d76', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9db75e6b-9462-4d5f-8939-a281ea75ea84', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2022.457080] env[62619]: DEBUG oslo.service.loopingcall [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2022.457295] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2022.457494] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a8fe6cf-2c40-4479-a5b7-914d6cac2216 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.476867] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2022.476867] env[62619]: value = "task-1778705" [ 2022.476867] env[62619]: _type = "Task" [ 2022.476867] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.487538] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778705, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.560488] env[62619]: INFO nova.compute.manager [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Rescuing [ 2022.560745] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "refresh_cache-dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2022.560922] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquired lock "refresh_cache-dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2022.561121] env[62619]: DEBUG nova.network.neutron [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2022.612360] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.636s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2022.630581] env[62619]: INFO nova.scheduler.client.report [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Deleted allocations for instance cec0ea75-042d-4ee5-91d5-cad86456ab97 [ 2022.987248] env[62619]: DEBUG oslo_vmware.api 
[-] Task: {'id': task-1778705, 'name': CreateVM_Task, 'duration_secs': 0.327169} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.987442] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2022.988028] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2022.988204] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2022.988534] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2022.988784] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee9663f1-5dfa-4c76-874c-6460178714c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.992961] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2022.992961] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527a938d-e1d8-c2d7-294b-5d26cd9cfd50" [ 2022.992961] env[62619]: _type = "Task" [ 2022.992961] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.999883] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527a938d-e1d8-c2d7-294b-5d26cd9cfd50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.138306] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43872410-b78e-4a31-8c4c-fbb0d725398d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "cec0ea75-042d-4ee5-91d5-cad86456ab97" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.832s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.265868] env[62619]: DEBUG nova.network.neutron [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Updating instance_info_cache with network_info: [{"id": "5186dbd9-28b8-4b21-91a1-4cf5100781e6", "address": "fa:16:3e:f1:74:e6", "network": {"id": "50c6d508-1bcb-4435-b59e-4994f7ce32c0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1862625551-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3f078f46880b423dae58d87e31f291d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5186dbd9-28", "ovs_interfaceid": "5186dbd9-28b8-4b21-91a1-4cf5100781e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2023.503757] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]527a938d-e1d8-c2d7-294b-5d26cd9cfd50, 'name': SearchDatastore_Task, 'duration_secs': 0.019153} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.504068] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2023.504301] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2023.504527] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2023.504672] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2023.504847] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2023.505109] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c4b64f5-9ac4-4d2e-8516-a75a24476132 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.512974] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2023.513161] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2023.514072] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22394d93-fb6c-4712-9b01-1120647f21b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.518887] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2023.518887] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52529347-67ba-1e56-8184-b7f10b868ec9" [ 2023.518887] env[62619]: _type = "Task" [ 2023.518887] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.526195] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52529347-67ba-1e56-8184-b7f10b868ec9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.769072] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Releasing lock "refresh_cache-dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2024.029792] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52529347-67ba-1e56-8184-b7f10b868ec9, 'name': SearchDatastore_Task, 'duration_secs': 0.00828} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.030660] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1de86bb5-c9be-40b9-a569-6972b4ea7745 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.036204] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2024.036204] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529bc7c9-c8bb-bc24-3e54-5c509cd9e393" [ 2024.036204] env[62619]: _type = "Task" [ 2024.036204] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.043578] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529bc7c9-c8bb-bc24-3e54-5c509cd9e393, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.546937] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]529bc7c9-c8bb-bc24-3e54-5c509cd9e393, 'name': SearchDatastore_Task, 'duration_secs': 0.009398} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.547223] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2024.547458] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 0cde512e-a9f8-4f9b-9e08-41494090e314/0cde512e-a9f8-4f9b-9e08-41494090e314.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2024.547710] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1daaf95b-7bf1-46f8-bc5e-d4d67417b386 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.554897] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2024.554897] env[62619]: value = "task-1778706" [ 2024.554897] env[62619]: _type = "Task" [ 2024.554897] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.564225] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778706, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.068545] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778706, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.410348} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.068940] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 0cde512e-a9f8-4f9b-9e08-41494090e314/0cde512e-a9f8-4f9b-9e08-41494090e314.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2025.069257] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2025.069577] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3d6ce622-723c-495c-94fb-f00822847357 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.076783] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2025.076783] env[62619]: value = "task-1778707" [ 2025.076783] env[62619]: _type = "Task" [ 2025.076783] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.086443] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778707, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.291787] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "424da668-f458-44a6-9b38-e6c5db352a01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.292043] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.301849] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2025.302263] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18ceb7f8-217a-4bcc-a483-7925da19e3f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.309355] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2025.309355] env[62619]: value = "task-1778708" [ 2025.309355] env[62619]: _type = "Task" [ 2025.309355] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.316952] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778708, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.586429] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778707, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063348} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.586781] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2025.587544] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945470a6-ad81-4ecd-a0b3-77bbf91b8b3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.608601] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 0cde512e-a9f8-4f9b-9e08-41494090e314/0cde512e-a9f8-4f9b-9e08-41494090e314.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2025.608835] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63ec5320-3426-4061-b94a-7f19836c2a16 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.626960] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2025.626960] env[62619]: value = "task-1778709" [ 2025.626960] env[62619]: _type = "Task" [ 2025.626960] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.634875] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778709, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.794697] env[62619]: DEBUG nova.compute.manager [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2025.819591] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778708, 'name': PowerOffVM_Task, 'duration_secs': 0.172623} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.819851] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2025.820681] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5656ca0-1fdb-4480-bfc0-f54064899c2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.838603] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ebcf54d-beb9-4b8e-8556-c356750e1ae3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.870682] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2025.870959] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fbc5ba21-14fd-4e37-b25d-e3cd4fd87ceb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.877619] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2025.877619] env[62619]: value = "task-1778710" [ 2025.877619] env[62619]: _type = "Task" [ 2025.877619] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.884993] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778710, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.137176] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778709, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.317935] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2026.318243] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.319874] env[62619]: INFO nova.compute.claims [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2026.386784] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2026.387024] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2026.387266] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2026.387382] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2026.387556] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2026.387796] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a78aa53e-1085-4555-a209-7fdeb10630ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.395526] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2026.395692] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2026.396364] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-829543f5-ff92-4063-ad15-97137fa6679c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.400905] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2026.400905] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5252e376-d73a-3051-7ba2-f0e850da1b38" [ 2026.400905] env[62619]: _type = "Task" [ 2026.400905] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.407933] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5252e376-d73a-3051-7ba2-f0e850da1b38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.638025] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778709, 'name': ReconfigVM_Task, 'duration_secs': 0.699703} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.638025] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 0cde512e-a9f8-4f9b-9e08-41494090e314/0cde512e-a9f8-4f9b-9e08-41494090e314.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2026.638450] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-50d55d79-752b-4023-a1ac-831066b13f6d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.645676] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2026.645676] env[62619]: value = "task-1778711" [ 2026.645676] env[62619]: _type = "Task" [ 2026.645676] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.653851] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778711, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.910781] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5252e376-d73a-3051-7ba2-f0e850da1b38, 'name': SearchDatastore_Task, 'duration_secs': 0.008503} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.911538] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2635faf2-3c25-4557-b33a-f0664e7f23cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.916356] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2026.916356] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b45066-4658-ae3f-ce26-393f46c76a76" [ 2026.916356] env[62619]: _type = "Task" [ 2026.916356] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.923574] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b45066-4658-ae3f-ce26-393f46c76a76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.155997] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778711, 'name': Rename_Task, 'duration_secs': 0.130535} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.156284] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2027.156517] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-566635a8-80af-4dcf-835b-52a0987e0b75 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.162315] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2027.162315] env[62619]: value = "task-1778712" [ 2027.162315] env[62619]: _type = "Task" [ 2027.162315] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.169179] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778712, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.400796] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b494cfe0-3a97-4454-bea5-8956e92118bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.412551] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d14a4a-dc40-4e3e-affa-a10a37e0e65d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.454646] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1e5094-adf0-46d0-b53f-142fd0dc2a17 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.457412] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b45066-4658-ae3f-ce26-393f46c76a76, 'name': SearchDatastore_Task, 'duration_secs': 0.008468} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.457740] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2027.458055] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk. 
{{(pid=62619) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2027.458790] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f91f791-16e5-4f27-8754-0b90a0b2dbcd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.464057] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2daa7d2d-72d4-4095-a161-fb4e9e56dd25 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.468469] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2027.468469] env[62619]: value = "task-1778713" [ 2027.468469] env[62619]: _type = "Task" [ 2027.468469] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.479359] env[62619]: DEBUG nova.compute.provider_tree [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2027.485092] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778713, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.678018] env[62619]: DEBUG oslo_vmware.api [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778712, 'name': PowerOnVM_Task, 'duration_secs': 0.434051} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.678385] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2027.678627] env[62619]: DEBUG nova.compute.manager [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2027.679555] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb5e6c9-3711-4111-99be-b947ce540604 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.978437] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778713, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467271} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.978757] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk. [ 2027.979545] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f2feb4-56fa-4996-aa60-e3de96daa58f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.982866] env[62619]: DEBUG nova.scheduler.client.report [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2028.009325] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2028.009632] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad69f1fd-e368-4b5b-821d-f3bd63482a37 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.027686] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2028.027686] env[62619]: value = "task-1778714" [ 2028.027686] env[62619]: _type = "Task" [ 2028.027686] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.035494] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778714, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.198843] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.487796] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.169s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.488272] env[62619]: DEBUG nova.compute.manager [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2028.491405] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.292s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.491596] env[62619]: DEBUG nova.objects.instance [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2028.537229] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778714, 'name': ReconfigVM_Task, 'duration_secs': 0.264324} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.537500] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Reconfigured VM instance instance-00000076 to attach disk [datastore1] dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2028.538326] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05595bc3-6f83-4f1f-9ce3-04f1227a66ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.563044] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79505096-b541-4f65-9895-20496cc9609c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.577819] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2028.577819] env[62619]: value = "task-1778715" [ 2028.577819] env[62619]: _type = "Task" [ 2028.577819] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.586883] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778715, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.996376] env[62619]: DEBUG nova.compute.utils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2029.000849] env[62619]: DEBUG nova.compute.manager [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2029.001037] env[62619]: DEBUG nova.network.neutron [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2029.059109] env[62619]: DEBUG nova.policy [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9271ec762cd42168ab2c9957d38eaba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4010737526cd4a3aa36f15a187051010', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 2029.077130] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2029.077342] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2029.088401] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778715, 'name': ReconfigVM_Task, 'duration_secs': 0.210358} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.090238] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2029.090712] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38c5662b-5776-4d88-8867-1502d4bc0685 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.097752] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2029.097752] env[62619]: value = "task-1778716" [ 2029.097752] env[62619]: _type = "Task" [ 2029.097752] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.105302] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778716, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.328170] env[62619]: DEBUG nova.network.neutron [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Successfully created port: 3c71631e-780b-40b4-b5d0-47a37178aa17 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2029.502033] env[62619]: DEBUG nova.compute.manager [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2029.507650] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6665c01-ac31-4249-8f5f-d7706b38e7b6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2029.581403] env[62619]: DEBUG nova.compute.utils [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2029.608382] env[62619]: DEBUG oslo_vmware.api [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778716, 'name': PowerOnVM_Task, 'duration_secs': 0.379696} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.608690] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2029.611241] env[62619]: DEBUG nova.compute.manager [None req-e7869a38-f685-4745-93d0-abb558e9eea9 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2029.612265] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c532d5c-a0c5-4705-bd08-ff3273df19e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.083975] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.517518] env[62619]: DEBUG nova.compute.manager [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2030.538821] env[62619]: DEBUG nova.virt.hardware [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2030.539085] env[62619]: DEBUG nova.virt.hardware [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2030.539311] env[62619]: DEBUG nova.virt.hardware [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2030.539510] env[62619]: DEBUG nova.virt.hardware [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2030.539658] env[62619]: DEBUG nova.virt.hardware [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2030.539825] env[62619]: DEBUG nova.virt.hardware [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2030.540052] env[62619]: DEBUG nova.virt.hardware [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2030.540247] env[62619]: DEBUG nova.virt.hardware [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2030.540431] env[62619]: DEBUG nova.virt.hardware [None 
req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2030.540611] env[62619]: DEBUG nova.virt.hardware [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2030.540794] env[62619]: DEBUG nova.virt.hardware [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2030.541665] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc0d647-9912-4ee1-b724-ea24817583a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.549477] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373f4441-5910-4ba4-98f0-26849956208f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.712515] env[62619]: DEBUG nova.compute.manager [req-9b8838ac-351f-461c-a85e-52ad5b785abd req-c37b40b2-e1a1-45e3-a936-c516dd9c7863 service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Received event network-vif-plugged-3c71631e-780b-40b4-b5d0-47a37178aa17 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2030.712726] env[62619]: DEBUG oslo_concurrency.lockutils [req-9b8838ac-351f-461c-a85e-52ad5b785abd req-c37b40b2-e1a1-45e3-a936-c516dd9c7863 service nova] Acquiring lock "424da668-f458-44a6-9b38-e6c5db352a01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2030.712928] env[62619]: DEBUG oslo_concurrency.lockutils [req-9b8838ac-351f-461c-a85e-52ad5b785abd req-c37b40b2-e1a1-45e3-a936-c516dd9c7863 service nova] Lock "424da668-f458-44a6-9b38-e6c5db352a01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.713106] env[62619]: DEBUG oslo_concurrency.lockutils [req-9b8838ac-351f-461c-a85e-52ad5b785abd req-c37b40b2-e1a1-45e3-a936-c516dd9c7863 service nova] Lock "424da668-f458-44a6-9b38-e6c5db352a01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.713272] env[62619]: DEBUG nova.compute.manager [req-9b8838ac-351f-461c-a85e-52ad5b785abd req-c37b40b2-e1a1-45e3-a936-c516dd9c7863 service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] No waiting events found dispatching network-vif-plugged-3c71631e-780b-40b4-b5d0-47a37178aa17 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2030.713432] env[62619]: WARNING nova.compute.manager [req-9b8838ac-351f-461c-a85e-52ad5b785abd 
req-c37b40b2-e1a1-45e3-a936-c516dd9c7863 service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Received unexpected event network-vif-plugged-3c71631e-780b-40b4-b5d0-47a37178aa17 for instance with vm_state building and task_state spawning. [ 2030.963647] env[62619]: DEBUG nova.network.neutron [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Successfully updated port: 3c71631e-780b-40b4-b5d0-47a37178aa17 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2031.093027] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "8a780b5e-1767-47c3-99f7-4844e7df5346" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.093338] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "8a780b5e-1767-47c3-99f7-4844e7df5346" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.147922] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.148177] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.148415] env[62619]: INFO nova.compute.manager [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Attaching volume b1cdf37a-a1a8-498a-a080-5172a5357cfd to /dev/sdb [ 2031.183736] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f700ad1-a7aa-4aaa-9062-208e64cfa20c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.191146] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7133dd6-35ec-4c50-aa6a-e3cf4c0ea9dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.205079] env[62619]: DEBUG nova.virt.block_device [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 
e599f5ad-1b4d-4912-9b96-2544c52b0acf] Updating existing volume attachment record: 7c8cc321-7ba8-4e29-89b2-679e128821ba {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2031.466217] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "refresh_cache-424da668-f458-44a6-9b38-e6c5db352a01" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2031.466362] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquired lock "refresh_cache-424da668-f458-44a6-9b38-e6c5db352a01" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2031.466490] env[62619]: DEBUG nova.network.neutron [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2031.595910] env[62619]: DEBUG nova.compute.manager [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2031.999425] env[62619]: DEBUG nova.network.neutron [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2032.117828] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2032.118099] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2032.119897] env[62619]: INFO nova.compute.claims [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2032.160382] env[62619]: DEBUG nova.network.neutron [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Updating instance_info_cache with network_info: [{"id": "3c71631e-780b-40b4-b5d0-47a37178aa17", "address": "fa:16:3e:03:ef:bd", "network": {"id": "6a1847de-b585-445d-8064-dc33dc365719", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1852054191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4010737526cd4a3aa36f15a187051010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c71631e-78", "ovs_interfaceid": "3c71631e-780b-40b4-b5d0-47a37178aa17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2032.662866] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Releasing lock "refresh_cache-424da668-f458-44a6-9b38-e6c5db352a01" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2032.663215] env[62619]: DEBUG nova.compute.manager [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Instance network_info: |[{"id": "3c71631e-780b-40b4-b5d0-47a37178aa17", "address": "fa:16:3e:03:ef:bd", "network": {"id": 
"6a1847de-b585-445d-8064-dc33dc365719", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1852054191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4010737526cd4a3aa36f15a187051010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c71631e-78", "ovs_interfaceid": "3c71631e-780b-40b4-b5d0-47a37178aa17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2032.663625] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:ef:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e4c8c8fd-baca-4e60-97dc-ff0418d63215', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c71631e-780b-40b4-b5d0-47a37178aa17', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2032.671044] env[62619]: DEBUG oslo.service.loopingcall [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2032.671479] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2032.671710] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc2fba0f-cded-456e-9277-640c3a1a880b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.693186] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2032.693186] env[62619]: value = "task-1778720" [ 2032.693186] env[62619]: _type = "Task" [ 2032.693186] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.700609] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778720, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.739820] env[62619]: DEBUG nova.compute.manager [req-a96ae865-73d8-430b-8204-804964169fa2 req-5b598d99-4b96-416b-b6c6-7d46a85170bd service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Received event network-changed-3c71631e-780b-40b4-b5d0-47a37178aa17 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2032.740011] env[62619]: DEBUG nova.compute.manager [req-a96ae865-73d8-430b-8204-804964169fa2 req-5b598d99-4b96-416b-b6c6-7d46a85170bd service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Refreshing instance network info cache due to event network-changed-3c71631e-780b-40b4-b5d0-47a37178aa17. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2032.740227] env[62619]: DEBUG oslo_concurrency.lockutils [req-a96ae865-73d8-430b-8204-804964169fa2 req-5b598d99-4b96-416b-b6c6-7d46a85170bd service nova] Acquiring lock "refresh_cache-424da668-f458-44a6-9b38-e6c5db352a01" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2032.740363] env[62619]: DEBUG oslo_concurrency.lockutils [req-a96ae865-73d8-430b-8204-804964169fa2 req-5b598d99-4b96-416b-b6c6-7d46a85170bd service nova] Acquired lock "refresh_cache-424da668-f458-44a6-9b38-e6c5db352a01" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2032.740529] env[62619]: DEBUG nova.network.neutron [req-a96ae865-73d8-430b-8204-804964169fa2 req-5b598d99-4b96-416b-b6c6-7d46a85170bd service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Refreshing network info cache for port 3c71631e-780b-40b4-b5d0-47a37178aa17 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2033.206167] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778720, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.211038] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc16557e-7ca2-42af-a02b-ef9c18a3bcbe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.219240] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d2bef6-12ab-41ca-ba5f-e68de4cbd199 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.253185] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2565986-e23d-4a38-984d-d474e89cc958 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.260995] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90747000-b332-4f72-8993-23c3ed832dbd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.274943] env[62619]: DEBUG nova.compute.provider_tree [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2033.444226] env[62619]: DEBUG nova.network.neutron [req-a96ae865-73d8-430b-8204-804964169fa2 req-5b598d99-4b96-416b-b6c6-7d46a85170bd service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Updated VIF entry in instance network info cache for port 3c71631e-780b-40b4-b5d0-47a37178aa17. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2033.444874] env[62619]: DEBUG nova.network.neutron [req-a96ae865-73d8-430b-8204-804964169fa2 req-5b598d99-4b96-416b-b6c6-7d46a85170bd service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Updating instance_info_cache with network_info: [{"id": "3c71631e-780b-40b4-b5d0-47a37178aa17", "address": "fa:16:3e:03:ef:bd", "network": {"id": "6a1847de-b585-445d-8064-dc33dc365719", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1852054191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4010737526cd4a3aa36f15a187051010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c71631e-78", "ovs_interfaceid": "3c71631e-780b-40b4-b5d0-47a37178aa17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2033.704738] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778720, 'name': CreateVM_Task, 'duration_secs': 0.603351} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.704946] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2033.705642] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2033.705841] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2033.706204] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2033.706478] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f4cab51-2265-4143-b24b-34fe2fa2676e {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.710721] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2033.710721] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5274c80f-fbdc-a532-1cb3-b6e7d5ba25fd" [ 2033.710721] env[62619]: _type = "Task" [ 2033.710721] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.718195] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5274c80f-fbdc-a532-1cb3-b6e7d5ba25fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.778767] env[62619]: DEBUG nova.scheduler.client.report [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2033.948745] env[62619]: DEBUG oslo_concurrency.lockutils [req-a96ae865-73d8-430b-8204-804964169fa2 req-5b598d99-4b96-416b-b6c6-7d46a85170bd service nova] Releasing lock "refresh_cache-424da668-f458-44a6-9b38-e6c5db352a01" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2034.221238] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5274c80f-fbdc-a532-1cb3-b6e7d5ba25fd, 'name': SearchDatastore_Task, 'duration_secs': 0.012212} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.221579] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2034.221758] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2034.221983] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2034.222150] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2034.222322] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2034.222569] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b855905-6b03-4be1-ae09-b8d476e16014 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.230429] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2034.230596] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2034.231282] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-767d8ead-ef23-4075-8442-2070985787d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.236246] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2034.236246] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525d2223-27f3-e0b7-2b48-413c72a98148" [ 2034.236246] env[62619]: _type = "Task" [ 2034.236246] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.243391] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525d2223-27f3-e0b7-2b48-413c72a98148, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.283572] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.165s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2034.284063] env[62619]: DEBUG nova.compute.manager [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2034.748089] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525d2223-27f3-e0b7-2b48-413c72a98148, 'name': SearchDatastore_Task, 'duration_secs': 0.009266} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.748870] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44826e49-f720-412f-8750-bcd6deacff5a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.754274] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2034.754274] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52df82d2-3a88-58fd-a7a5-c5c660ab5672" [ 2034.754274] env[62619]: _type = "Task" [ 2034.754274] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.761588] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52df82d2-3a88-58fd-a7a5-c5c660ab5672, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.788985] env[62619]: DEBUG nova.compute.utils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2034.790404] env[62619]: DEBUG nova.compute.manager [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2034.790610] env[62619]: DEBUG nova.network.neutron [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2034.828078] env[62619]: DEBUG nova.policy [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '960fb31864a64d15a0ef3833e622dacb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f078f46880b423dae58d87e31f291d3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 2035.078233] env[62619]: DEBUG nova.network.neutron [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Successfully created port: cb8fcd68-9c4f-498a-9161-617782b4cac3 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2035.265088] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52df82d2-3a88-58fd-a7a5-c5c660ab5672, 'name': SearchDatastore_Task, 'duration_secs': 0.010138} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.265432] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2035.265580] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 424da668-f458-44a6-9b38-e6c5db352a01/424da668-f458-44a6-9b38-e6c5db352a01.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2035.265842] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-caf6ac86-e13f-4f18-b48b-dc90786e855e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.272878] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2035.272878] env[62619]: value = "task-1778722" [ 2035.272878] env[62619]: _type = "Task" [ 2035.272878] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.280792] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778722, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.293479] env[62619]: DEBUG nova.compute.manager [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2035.747446] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Volume attach. 
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2035.747738] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369202', 'volume_id': 'b1cdf37a-a1a8-498a-a080-5172a5357cfd', 'name': 'volume-b1cdf37a-a1a8-498a-a080-5172a5357cfd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e599f5ad-1b4d-4912-9b96-2544c52b0acf', 'attached_at': '', 'detached_at': '', 'volume_id': 'b1cdf37a-a1a8-498a-a080-5172a5357cfd', 'serial': 'b1cdf37a-a1a8-498a-a080-5172a5357cfd'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2035.748700] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff99099-140f-45f8-8768-206ade1531a1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.765005] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05089665-46ab-4eff-b268-22459cd074c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.789433] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] volume-b1cdf37a-a1a8-498a-a080-5172a5357cfd/volume-b1cdf37a-a1a8-498a-a080-5172a5357cfd.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2035.792548] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad536bbe-e2b5-45f5-aae8-0fbbe1aabc1b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.813886] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778722, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470232} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.815089] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 424da668-f458-44a6-9b38-e6c5db352a01/424da668-f458-44a6-9b38-e6c5db352a01.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2035.815340] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2035.815648] env[62619]: DEBUG oslo_vmware.api [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2035.815648] env[62619]: value = "task-1778723" [ 2035.815648] env[62619]: _type = "Task" [ 2035.815648] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.815828] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1857214c-8e6c-4666-871d-7ea0fe335433 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.826411] env[62619]: DEBUG oslo_vmware.api [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778723, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.827729] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2035.827729] env[62619]: value = "task-1778724" [ 2035.827729] env[62619]: _type = "Task" [ 2035.827729] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.835507] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778724, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.311023] env[62619]: DEBUG nova.compute.manager [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2036.328800] env[62619]: DEBUG oslo_vmware.api [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778723, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.337649] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778724, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.223443} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.337907] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2036.338681] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c596f2-e4ec-4258-a8bd-de93d92fbbc5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.343438] env[62619]: DEBUG nova.virt.hardware [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2036.343644] env[62619]: DEBUG nova.virt.hardware [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2036.343802] env[62619]: DEBUG nova.virt.hardware [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2036.343985] env[62619]: DEBUG nova.virt.hardware [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2036.344169] env[62619]: DEBUG nova.virt.hardware [None 
req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2036.344321] env[62619]: DEBUG nova.virt.hardware [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2036.344594] env[62619]: DEBUG nova.virt.hardware [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2036.344817] env[62619]: DEBUG nova.virt.hardware [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2036.345055] env[62619]: DEBUG nova.virt.hardware [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2036.345267] env[62619]: DEBUG nova.virt.hardware [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2036.345444] env[62619]: DEBUG nova.virt.hardware [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2036.346194] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d38149-434d-4a51-9f28-95a8cf357631 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.362363] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd594a98-0f11-4ccd-a3bd-2dcd7ad7477c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.374675] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 424da668-f458-44a6-9b38-e6c5db352a01/424da668-f458-44a6-9b38-e6c5db352a01.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2036.374926] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-f76a6ff6-a78a-4d1e-8dd8-418ce8ea214e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.400021] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2036.400021] env[62619]: value = "task-1778725" [ 2036.400021] env[62619]: _type = "Task" [ 2036.400021] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.408707] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.459949] env[62619]: DEBUG nova.compute.manager [req-dd25a46e-5f3a-40d9-965d-73bd5b1b4fee req-1f65bb97-7ad8-433c-9176-9e3eecf942a1 service nova] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Received event network-vif-plugged-cb8fcd68-9c4f-498a-9161-617782b4cac3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2036.460185] env[62619]: DEBUG oslo_concurrency.lockutils [req-dd25a46e-5f3a-40d9-965d-73bd5b1b4fee req-1f65bb97-7ad8-433c-9176-9e3eecf942a1 service nova] Acquiring lock "8a780b5e-1767-47c3-99f7-4844e7df5346-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2036.460395] env[62619]: DEBUG oslo_concurrency.lockutils [req-dd25a46e-5f3a-40d9-965d-73bd5b1b4fee req-1f65bb97-7ad8-433c-9176-9e3eecf942a1 service nova] Lock "8a780b5e-1767-47c3-99f7-4844e7df5346-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.460632] env[62619]: DEBUG oslo_concurrency.lockutils [req-dd25a46e-5f3a-40d9-965d-73bd5b1b4fee req-1f65bb97-7ad8-433c-9176-9e3eecf942a1 service nova] Lock "8a780b5e-1767-47c3-99f7-4844e7df5346-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.460745] env[62619]: DEBUG nova.compute.manager [req-dd25a46e-5f3a-40d9-965d-73bd5b1b4fee req-1f65bb97-7ad8-433c-9176-9e3eecf942a1 service nova] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] No waiting events found dispatching network-vif-plugged-cb8fcd68-9c4f-498a-9161-617782b4cac3 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2036.460896] env[62619]: WARNING nova.compute.manager [req-dd25a46e-5f3a-40d9-965d-73bd5b1b4fee req-1f65bb97-7ad8-433c-9176-9e3eecf942a1 service nova] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Received unexpected event network-vif-plugged-cb8fcd68-9c4f-498a-9161-617782b4cac3 for instance with vm_state building and task_state spawning. 
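
The "Acquiring lock … / acquired … / released" DEBUG triplets above (for example the per-instance "8a780b5e-1767-47c3-99f7-4844e7df5346-events" lock held for 0.000s, or the "compute_resources" lock around ResourceTracker.instance_claim) are emitted by oslo.concurrency's lockutils wrappers around the guarded code. A minimal sketch of that pattern follows; the function names are hypothetical and the lock names are reused from this log only as examples, so this is an illustration of the lockutils API, not Nova's actual code.

import logging

from oslo_concurrency import lockutils

# Surface lockutils' DEBUG "Acquiring lock ... / acquired ... / released" messages,
# which is what produces the triplets seen in the log above.
logging.basicConfig(level=logging.DEBUG)

# Hypothetical per-instance event handler; the decorator serializes callers on a
# named in-process lock and logs acquire/hold/release timings like the entries above.
@lockutils.synchronized("8a780b5e-1767-47c3-99f7-4844e7df5346-events")
def pop_instance_event():
    return None  # e.g. pop a pending external event for the instance

# Context-manager form: guard an inline critical section under a shared lock name,
# comparable to the "compute_resources" lock taken during the resource claim above.
def claim_resources():
    with lockutils.lock("compute_resources"):
        pass  # do the claim while holding the lock

pop_instance_event()
claim_resources()
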
[ 2036.544973] env[62619]: DEBUG nova.network.neutron [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Successfully updated port: cb8fcd68-9c4f-498a-9161-617782b4cac3 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2036.829054] env[62619]: DEBUG oslo_vmware.api [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778723, 'name': ReconfigVM_Task, 'duration_secs': 0.541515} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.829247] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Reconfigured VM instance instance-00000075 to attach disk [datastore1] volume-b1cdf37a-a1a8-498a-a080-5172a5357cfd/volume-b1cdf37a-a1a8-498a-a080-5172a5357cfd.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2036.834318] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af7d72cf-431a-4ad6-a00b-ddd35d62d4eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.850439] env[62619]: DEBUG oslo_vmware.api [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2036.850439] env[62619]: value = "task-1778726" [ 2036.850439] env[62619]: _type = "Task" [ 2036.850439] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.858536] env[62619]: DEBUG oslo_vmware.api [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778726, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.910159] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778725, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.048014] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "refresh_cache-8a780b5e-1767-47c3-99f7-4844e7df5346" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2037.048195] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquired lock "refresh_cache-8a780b5e-1767-47c3-99f7-4844e7df5346" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2037.048382] env[62619]: DEBUG nova.network.neutron [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2037.359927] env[62619]: DEBUG oslo_vmware.api [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778726, 'name': ReconfigVM_Task, 'duration_secs': 0.161937} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.360299] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369202', 'volume_id': 'b1cdf37a-a1a8-498a-a080-5172a5357cfd', 'name': 'volume-b1cdf37a-a1a8-498a-a080-5172a5357cfd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e599f5ad-1b4d-4912-9b96-2544c52b0acf', 'attached_at': '', 'detached_at': '', 'volume_id': 'b1cdf37a-a1a8-498a-a080-5172a5357cfd', 'serial': 'b1cdf37a-a1a8-498a-a080-5172a5357cfd'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2037.411983] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778725, 'name': ReconfigVM_Task, 'duration_secs': 0.941019} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.412205] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 424da668-f458-44a6-9b38-e6c5db352a01/424da668-f458-44a6-9b38-e6c5db352a01.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2037.412765] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7320e8f-24ee-4da0-87f4-255f4040670c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.418944] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2037.418944] env[62619]: value = "task-1778727" [ 2037.418944] env[62619]: _type = "Task" [ 2037.418944] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.425830] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778727, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.582588] env[62619]: DEBUG nova.network.neutron [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2037.698322] env[62619]: DEBUG nova.network.neutron [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Updating instance_info_cache with network_info: [{"id": "cb8fcd68-9c4f-498a-9161-617782b4cac3", "address": "fa:16:3e:bd:6c:87", "network": {"id": "50c6d508-1bcb-4435-b59e-4994f7ce32c0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1862625551-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3f078f46880b423dae58d87e31f291d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb8fcd68-9c", "ovs_interfaceid": "cb8fcd68-9c4f-498a-9161-617782b4cac3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2037.928633] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778727, 'name': Rename_Task, 'duration_secs': 0.142938} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.928937] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2037.929191] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e75b7d3-cc6a-4a27-bafa-cd2687f200fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.935802] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2037.935802] env[62619]: value = "task-1778728" [ 2037.935802] env[62619]: _type = "Task" [ 2037.935802] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.945232] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778728, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.200814] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Releasing lock "refresh_cache-8a780b5e-1767-47c3-99f7-4844e7df5346" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2038.201133] env[62619]: DEBUG nova.compute.manager [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Instance network_info: |[{"id": "cb8fcd68-9c4f-498a-9161-617782b4cac3", "address": "fa:16:3e:bd:6c:87", "network": {"id": "50c6d508-1bcb-4435-b59e-4994f7ce32c0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1862625551-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3f078f46880b423dae58d87e31f291d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb8fcd68-9c", "ovs_interfaceid": "cb8fcd68-9c4f-498a-9161-617782b4cac3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2038.201564] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:6c:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f17856cf-7248-414b-bde6-8c90cfb4c593', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb8fcd68-9c4f-498a-9161-617782b4cac3', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2038.208985] env[62619]: DEBUG oslo.service.loopingcall [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2038.209206] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2038.209427] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74de2c7f-285c-447e-a908-fcceb720f9e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.228930] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2038.228930] env[62619]: value = "task-1778729" [ 2038.228930] env[62619]: _type = "Task" [ 2038.228930] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.236375] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778729, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.395892] env[62619]: DEBUG nova.objects.instance [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lazy-loading 'flavor' on Instance uuid e599f5ad-1b4d-4912-9b96-2544c52b0acf {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2038.446285] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778728, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.488234] env[62619]: DEBUG nova.compute.manager [req-cc61d86d-ab4f-4505-b65b-e03ccace022a req-6e6354fb-66ec-4b97-b852-2f1cf777b76e service nova] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Received event network-changed-cb8fcd68-9c4f-498a-9161-617782b4cac3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2038.488467] env[62619]: DEBUG nova.compute.manager [req-cc61d86d-ab4f-4505-b65b-e03ccace022a req-6e6354fb-66ec-4b97-b852-2f1cf777b76e service nova] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Refreshing instance network info cache due to event network-changed-cb8fcd68-9c4f-498a-9161-617782b4cac3. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2038.488752] env[62619]: DEBUG oslo_concurrency.lockutils [req-cc61d86d-ab4f-4505-b65b-e03ccace022a req-6e6354fb-66ec-4b97-b852-2f1cf777b76e service nova] Acquiring lock "refresh_cache-8a780b5e-1767-47c3-99f7-4844e7df5346" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2038.489011] env[62619]: DEBUG oslo_concurrency.lockutils [req-cc61d86d-ab4f-4505-b65b-e03ccace022a req-6e6354fb-66ec-4b97-b852-2f1cf777b76e service nova] Acquired lock "refresh_cache-8a780b5e-1767-47c3-99f7-4844e7df5346" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2038.489293] env[62619]: DEBUG nova.network.neutron [req-cc61d86d-ab4f-4505-b65b-e03ccace022a req-6e6354fb-66ec-4b97-b852-2f1cf777b76e service nova] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Refreshing network info cache for port cb8fcd68-9c4f-498a-9161-617782b4cac3 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2038.739364] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778729, 'name': CreateVM_Task, 'duration_secs': 0.419119} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.739532] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2038.740206] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2038.740375] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2038.740779] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2038.741077] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f217abb8-5a85-4856-96ee-347d4dd5772e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.745633] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2038.745633] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524e87e4-3ef2-5600-f46c-294336699a25" [ 2038.745633] env[62619]: _type = "Task" [ 2038.745633] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.753216] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524e87e4-3ef2-5600-f46c-294336699a25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.901597] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8279c2db-5ab1-4d7e-964d-c82c3c7c1a11 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.753s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.946616] env[62619]: DEBUG oslo_vmware.api [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778728, 'name': PowerOnVM_Task, 'duration_secs': 0.776541} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.946894] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2038.947162] env[62619]: INFO nova.compute.manager [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Took 8.43 seconds to spawn the instance on the hypervisor. [ 2038.947370] env[62619]: DEBUG nova.compute.manager [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2038.948226] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d573b432-0262-4e39-8e40-6bda64bf8e3c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.179276] env[62619]: DEBUG nova.network.neutron [req-cc61d86d-ab4f-4505-b65b-e03ccace022a req-6e6354fb-66ec-4b97-b852-2f1cf777b76e service nova] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Updated VIF entry in instance network info cache for port cb8fcd68-9c4f-498a-9161-617782b4cac3. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2039.179638] env[62619]: DEBUG nova.network.neutron [req-cc61d86d-ab4f-4505-b65b-e03ccace022a req-6e6354fb-66ec-4b97-b852-2f1cf777b76e service nova] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Updating instance_info_cache with network_info: [{"id": "cb8fcd68-9c4f-498a-9161-617782b4cac3", "address": "fa:16:3e:bd:6c:87", "network": {"id": "50c6d508-1bcb-4435-b59e-4994f7ce32c0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1862625551-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3f078f46880b423dae58d87e31f291d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb8fcd68-9c", "ovs_interfaceid": "cb8fcd68-9c4f-498a-9161-617782b4cac3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2039.255752] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]524e87e4-3ef2-5600-f46c-294336699a25, 'name': SearchDatastore_Task, 'duration_secs': 0.010729} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.256063] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2039.256295] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2039.256557] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2039.256754] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2039.256955] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2039.257220] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-282d5815-70fe-4817-a7c9-4402d7fb2e4e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.265318] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2039.265492] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2039.266191] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4dac5ee-30cb-4bdf-b4f9-030ce84edcbf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.271196] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2039.271196] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52735f22-70de-9c03-3d2e-e0bf8655f61d" [ 2039.271196] env[62619]: _type = "Task" [ 2039.271196] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.278226] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52735f22-70de-9c03-3d2e-e0bf8655f61d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.465735] env[62619]: INFO nova.compute.manager [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Took 13.17 seconds to build instance. [ 2039.682329] env[62619]: DEBUG oslo_concurrency.lockutils [req-cc61d86d-ab4f-4505-b65b-e03ccace022a req-6e6354fb-66ec-4b97-b852-2f1cf777b76e service nova] Releasing lock "refresh_cache-8a780b5e-1767-47c3-99f7-4844e7df5346" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2039.782654] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52735f22-70de-9c03-3d2e-e0bf8655f61d, 'name': SearchDatastore_Task, 'duration_secs': 0.007936} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.783689] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5769829c-a879-4bea-9fe1-3593856ce6a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.789133] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2039.789133] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b6bf6c-9fe4-966f-b860-a7f0974a6776" [ 2039.789133] env[62619]: _type = "Task" [ 2039.789133] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.796433] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b6bf6c-9fe4-966f-b860-a7f0974a6776, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.968302] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27e8108f-d9fe-4b45-8e3d-d485bf3816f2 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.676s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2040.300325] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52b6bf6c-9fe4-966f-b860-a7f0974a6776, 'name': SearchDatastore_Task, 'duration_secs': 0.009626} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.300591] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.300973] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8a780b5e-1767-47c3-99f7-4844e7df5346/8a780b5e-1767-47c3-99f7-4844e7df5346.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2040.301165] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c111ac13-5833-4acd-be21-cefbe45d9a1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.308145] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2040.308145] env[62619]: value = "task-1778730" [ 2040.308145] env[62619]: _type = "Task" [ 2040.308145] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.315857] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778730, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.600595] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "7b29140c-320e-4497-b724-2587e2ff5793" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2040.601185] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "7b29140c-320e-4497-b724-2587e2ff5793" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.751667] env[62619]: DEBUG nova.compute.manager [req-8234aab7-f583-4c7d-b0fe-dc76395c4d66 req-75e47fe8-c661-4c38-a05a-37f94cd18c99 service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Received event network-changed-3c71631e-780b-40b4-b5d0-47a37178aa17 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2040.751929] env[62619]: DEBUG nova.compute.manager [req-8234aab7-f583-4c7d-b0fe-dc76395c4d66 req-75e47fe8-c661-4c38-a05a-37f94cd18c99 service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Refreshing instance network info cache due to event network-changed-3c71631e-780b-40b4-b5d0-47a37178aa17. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2040.752169] env[62619]: DEBUG oslo_concurrency.lockutils [req-8234aab7-f583-4c7d-b0fe-dc76395c4d66 req-75e47fe8-c661-4c38-a05a-37f94cd18c99 service nova] Acquiring lock "refresh_cache-424da668-f458-44a6-9b38-e6c5db352a01" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2040.752378] env[62619]: DEBUG oslo_concurrency.lockutils [req-8234aab7-f583-4c7d-b0fe-dc76395c4d66 req-75e47fe8-c661-4c38-a05a-37f94cd18c99 service nova] Acquired lock "refresh_cache-424da668-f458-44a6-9b38-e6c5db352a01" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2040.752467] env[62619]: DEBUG nova.network.neutron [req-8234aab7-f583-4c7d-b0fe-dc76395c4d66 req-75e47fe8-c661-4c38-a05a-37f94cd18c99 service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Refreshing network info cache for port 3c71631e-780b-40b4-b5d0-47a37178aa17 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2040.823786] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778730, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.103978] env[62619]: DEBUG nova.compute.manager [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2041.318976] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778730, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.632262} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.319273] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8a780b5e-1767-47c3-99f7-4844e7df5346/8a780b5e-1767-47c3-99f7-4844e7df5346.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2041.319489] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2041.319747] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b97867b-b817-4231-a2f1-c5de50e00fe2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.326841] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2041.326841] env[62619]: value = "task-1778731" [ 2041.326841] env[62619]: _type = "Task" [ 2041.326841] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.337407] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778731, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.484390] env[62619]: DEBUG nova.network.neutron [req-8234aab7-f583-4c7d-b0fe-dc76395c4d66 req-75e47fe8-c661-4c38-a05a-37f94cd18c99 service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Updated VIF entry in instance network info cache for port 3c71631e-780b-40b4-b5d0-47a37178aa17. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2041.484738] env[62619]: DEBUG nova.network.neutron [req-8234aab7-f583-4c7d-b0fe-dc76395c4d66 req-75e47fe8-c661-4c38-a05a-37f94cd18c99 service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Updating instance_info_cache with network_info: [{"id": "3c71631e-780b-40b4-b5d0-47a37178aa17", "address": "fa:16:3e:03:ef:bd", "network": {"id": "6a1847de-b585-445d-8064-dc33dc365719", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1852054191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4010737526cd4a3aa36f15a187051010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c71631e-78", "ovs_interfaceid": "3c71631e-780b-40b4-b5d0-47a37178aa17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2041.626093] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2041.626405] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2041.628507] env[62619]: INFO nova.compute.claims [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2041.836627] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778731, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081508} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.836965] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2041.837778] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9856139-fa43-491f-b4c8-78363df459c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.860011] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 8a780b5e-1767-47c3-99f7-4844e7df5346/8a780b5e-1767-47c3-99f7-4844e7df5346.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2041.860287] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-660ff19b-85b9-45cd-8359-b2b08b9f066e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.879009] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2041.879009] env[62619]: value = "task-1778732" [ 2041.879009] env[62619]: _type = "Task" [ 2041.879009] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.886890] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778732, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.987352] env[62619]: DEBUG oslo_concurrency.lockutils [req-8234aab7-f583-4c7d-b0fe-dc76395c4d66 req-75e47fe8-c661-4c38-a05a-37f94cd18c99 service nova] Releasing lock "refresh_cache-424da668-f458-44a6-9b38-e6c5db352a01" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2042.388566] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778732, 'name': ReconfigVM_Task, 'duration_secs': 0.259211} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.388875] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 8a780b5e-1767-47c3-99f7-4844e7df5346/8a780b5e-1767-47c3-99f7-4844e7df5346.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2042.389526] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d979cc58-718f-46bd-bca9-4fc6cb01e132 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.396067] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2042.396067] env[62619]: value = "task-1778733" [ 2042.396067] env[62619]: _type = "Task" [ 2042.396067] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.403604] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778733, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.717345] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559fc2c7-c378-4aed-b356-3a3bd3042eef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.724449] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978473f5-b7dd-4ebf-9924-3a979a152182 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.754634] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513b7689-1377-4b49-9b13-b474f5ba1903 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.761603] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c172ef1-5b68-4644-857c-562013fe852f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.774111] env[62619]: DEBUG nova.compute.provider_tree [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2042.905717] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778733, 'name': Rename_Task, 'duration_secs': 0.183625} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.905951] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2042.906180] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe912667-721c-4683-9ec7-3d9d94b3d093 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.912967] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2042.912967] env[62619]: value = "task-1778734" [ 2042.912967] env[62619]: _type = "Task" [ 2042.912967] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.920248] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778734, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.276929] env[62619]: DEBUG nova.scheduler.client.report [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2043.423725] env[62619]: DEBUG oslo_vmware.api [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778734, 'name': PowerOnVM_Task, 'duration_secs': 0.440393} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.423991] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2043.424208] env[62619]: INFO nova.compute.manager [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Took 7.11 seconds to spawn the instance on the hypervisor. 
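The spawn of instance 8a780b5e-1767-47c3-99f7-4844e7df5346 traced above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is driven by the same wait_for_task cycle each time: the vCenter call returns a task reference and the driver polls it, logging "progress is N%" until the task reports "completed successfully". The lines below are a minimal illustrative sketch of that polling pattern only, not the oslo.vmware implementation; get_task_info and the state/progress dict are hypothetical stand-ins for the PropertyCollector lookup of the task's TaskInfo.

    import time

    def wait_for_vcenter_task(get_task_info, task_ref, poll_interval=0.5):
        """Poll a vSphere-style task until it reaches a terminal state.

        Mirrors the log pattern above: each poll reports progress, and the
        loop ends with either success or an error.
        """
        while True:
            info = get_task_info(task_ref)   # e.g. {'state': 'running', 'progress': 66}
            state = info.get('state')
            if state in ('queued', 'running'):
                print("Task %s progress is %s%%" % (task_ref, info.get('progress', 0)))
                time.sleep(poll_interval)
                continue
            if state == 'success':
                print("Task %s completed successfully" % task_ref)
                return info
            # 'error' (or anything unexpected) ends the wait
            raise RuntimeError("Task %s failed: %s" % (task_ref, info.get('error')))

    # Example: a fake task that finishes after two polls.
    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'running', 'progress': 66},
                   {'state': 'success', 'progress': 100}])
    wait_for_vcenter_task(lambda ref: next(states), 'task-1778728', poll_interval=0)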
[ 2043.424383] env[62619]: DEBUG nova.compute.manager [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2043.425134] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7d3c5a-a9cb-49c3-8645-6988bc72b6e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.782141] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.156s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2043.782624] env[62619]: DEBUG nova.compute.manager [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2043.941135] env[62619]: INFO nova.compute.manager [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Took 11.84 seconds to build instance. [ 2044.241962] env[62619]: INFO nova.compute.manager [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Rescuing [ 2044.242249] env[62619]: DEBUG oslo_concurrency.lockutils [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "refresh_cache-8a780b5e-1767-47c3-99f7-4844e7df5346" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2044.242449] env[62619]: DEBUG oslo_concurrency.lockutils [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquired lock "refresh_cache-8a780b5e-1767-47c3-99f7-4844e7df5346" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2044.242694] env[62619]: DEBUG nova.network.neutron [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2044.287044] env[62619]: DEBUG nova.compute.utils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2044.288418] env[62619]: DEBUG nova.compute.manager [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 
tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2044.288586] env[62619]: DEBUG nova.network.neutron [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2044.327962] env[62619]: DEBUG nova.policy [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a13407c9bfc448bb27a06680d41afb2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72ae04936c9b4ea19b5d7fac78c96ba4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 2044.442207] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4fed94f-4161-4070-a045-6763c08c21c5 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "8a780b5e-1767-47c3-99f7-4844e7df5346" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.349s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.581901] env[62619]: DEBUG nova.network.neutron [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Successfully created port: 34b38d14-c4de-4ea8-ade4-66790a305b89 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2044.791715] env[62619]: DEBUG nova.compute.manager [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2044.968478] env[62619]: DEBUG nova.network.neutron [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Updating instance_info_cache with network_info: [{"id": "cb8fcd68-9c4f-498a-9161-617782b4cac3", "address": "fa:16:3e:bd:6c:87", "network": {"id": "50c6d508-1bcb-4435-b59e-4994f7ce32c0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1862625551-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3f078f46880b423dae58d87e31f291d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb8fcd68-9c", "ovs_interfaceid": "cb8fcd68-9c4f-498a-9161-617782b4cac3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2045.471194] env[62619]: DEBUG oslo_concurrency.lockutils [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Releasing lock "refresh_cache-8a780b5e-1767-47c3-99f7-4844e7df5346" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2045.802914] env[62619]: DEBUG nova.compute.manager [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2045.823399] env[62619]: DEBUG nova.virt.hardware [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2045.823689] env[62619]: DEBUG nova.virt.hardware [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2045.823866] env[62619]: DEBUG nova.virt.hardware [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2045.824087] env[62619]: DEBUG nova.virt.hardware [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2045.824267] env[62619]: DEBUG nova.virt.hardware [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2045.824414] env[62619]: DEBUG nova.virt.hardware [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2045.824651] env[62619]: DEBUG nova.virt.hardware [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2045.824837] env[62619]: DEBUG nova.virt.hardware [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2045.825038] 
env[62619]: DEBUG nova.virt.hardware [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2045.825208] env[62619]: DEBUG nova.virt.hardware [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2045.825400] env[62619]: DEBUG nova.virt.hardware [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2045.826356] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5faeb0aa-2b45-47a5-b17d-4694965c6b6e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.834615] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6f63d7-83de-4f09-b93e-7d4510c7fd56 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.989400] env[62619]: DEBUG nova.compute.manager [req-2e9b1e83-264f-4766-aea3-b0fc70d42505 req-7a3d79f2-1d34-47d1-8ea9-ca3d8f3a1047 service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Received event network-vif-plugged-34b38d14-c4de-4ea8-ade4-66790a305b89 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2045.989400] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e9b1e83-264f-4766-aea3-b0fc70d42505 req-7a3d79f2-1d34-47d1-8ea9-ca3d8f3a1047 service nova] Acquiring lock "7b29140c-320e-4497-b724-2587e2ff5793-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2045.989400] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e9b1e83-264f-4766-aea3-b0fc70d42505 req-7a3d79f2-1d34-47d1-8ea9-ca3d8f3a1047 service nova] Lock "7b29140c-320e-4497-b724-2587e2ff5793-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2045.989400] env[62619]: DEBUG oslo_concurrency.lockutils [req-2e9b1e83-264f-4766-aea3-b0fc70d42505 req-7a3d79f2-1d34-47d1-8ea9-ca3d8f3a1047 service nova] Lock "7b29140c-320e-4497-b724-2587e2ff5793-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2045.989400] env[62619]: DEBUG nova.compute.manager [req-2e9b1e83-264f-4766-aea3-b0fc70d42505 req-7a3d79f2-1d34-47d1-8ea9-ca3d8f3a1047 service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] No waiting events found dispatching network-vif-plugged-34b38d14-c4de-4ea8-ade4-66790a305b89 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2045.989400] env[62619]: WARNING 
nova.compute.manager [req-2e9b1e83-264f-4766-aea3-b0fc70d42505 req-7a3d79f2-1d34-47d1-8ea9-ca3d8f3a1047 service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Received unexpected event network-vif-plugged-34b38d14-c4de-4ea8-ade4-66790a305b89 for instance with vm_state building and task_state spawning. [ 2046.086778] env[62619]: DEBUG nova.network.neutron [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Successfully updated port: 34b38d14-c4de-4ea8-ade4-66790a305b89 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2046.589621] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "refresh_cache-7b29140c-320e-4497-b724-2587e2ff5793" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2046.589783] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired lock "refresh_cache-7b29140c-320e-4497-b724-2587e2ff5793" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2046.589886] env[62619]: DEBUG nova.network.neutron [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2047.007439] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2047.007795] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2bc028b3-cfe6-4d7b-9a7d-cc5cfde86cf2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.017408] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2047.017408] env[62619]: value = "task-1778735" [ 2047.017408] env[62619]: _type = "Task" [ 2047.017408] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.024845] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778735, 'name': PowerOffVM_Task} progress is 0%. 
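The nova.virt.hardware lines above show how the CPU topology for the m1.nano flavor is chosen: neither flavor nor image sets limits or preferences, so the defaults of 65536 per dimension apply and the only split of 1 vCPU is sockets=1, cores=1, threads=1. A simplified sketch of that enumeration, assuming nothing beyond what the log states (Nova's real topology code also handles preferences and NUMA):

```python
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate every (sockets, cores, threads) split of `vcpus` that stays
    # within the per-dimension maxima; 1 vCPU can only yield 1:1:1.
    return [(s, c, t)
            for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                                   range(1, min(vcpus, max_cores) + 1),
                                   range(1, min(vcpus, max_threads) + 1))
            if s * c * t == vcpus]

print(possible_topologies(1))   # [(1, 1, 1)], matching "Got 1 possible topologies"
print(possible_topologies(4))   # several candidates once there is more than one vCPU
```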
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.123599] env[62619]: DEBUG nova.network.neutron [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2047.250639] env[62619]: DEBUG nova.network.neutron [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Updating instance_info_cache with network_info: [{"id": "34b38d14-c4de-4ea8-ade4-66790a305b89", "address": "fa:16:3e:f2:18:fc", "network": {"id": "0fdc1539-eb35-4283-99e8-fcc5a7a64110", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2082542751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ae04936c9b4ea19b5d7fac78c96ba4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34b38d14-c4", "ovs_interfaceid": "34b38d14-c4de-4ea8-ade4-66790a305b89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2047.529099] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778735, 'name': PowerOffVM_Task, 'duration_secs': 0.319722} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.529387] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2047.530213] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f526b38a-c291-409a-869f-b33117ccc8e2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.549292] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7c8796-6be2-4bb5-956d-c02d01c25f52 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.576029] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2047.576333] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7676f99d-94cd-4032-8982-63e47355748a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.583149] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2047.583149] env[62619]: value = "task-1778736" [ 2047.583149] env[62619]: _type = "Task" [ 2047.583149] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.590935] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778736, 'name': PowerOffVM_Task} progress is 0%. 
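The repeated "Task: {...} progress is N%." and "completed successfully." lines are oslo_vmware's task polling: PowerOffVM, CreateVM and the disk tasks return a task reference immediately and the API layer re-reads its state until it finishes. A library-free sketch of that pattern, where get_task_info and TaskFailed are illustrative stand-ins rather than oslo.vmware names:

```python
import time

class TaskFailed(Exception):
    """Illustrative stand-in for the error raised when a vSphere task fails."""

def wait_for_task(get_task_info, task_ref, interval=0.5):
    # Poll until the task reports a terminal state, mirroring the
    # "progress is N%." / "completed successfully." lines in this log.
    while True:
        info = get_task_info(task_ref)   # assumed: returns a dict with state/progress
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        print(f"Task {task_ref}: progress is {info.get('progress', 0)}%.")
        time.sleep(interval)

# Toy usage: a fake task that completes on the third poll.
_polls = iter([{"state": "running", "progress": 0},
               {"state": "running", "progress": 89},
               {"state": "success", "result": "vm-123"}])
print(wait_for_task(lambda ref: next(_polls), "task-1778737", interval=0))
```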
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.753994] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Releasing lock "refresh_cache-7b29140c-320e-4497-b724-2587e2ff5793" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2047.754389] env[62619]: DEBUG nova.compute.manager [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Instance network_info: |[{"id": "34b38d14-c4de-4ea8-ade4-66790a305b89", "address": "fa:16:3e:f2:18:fc", "network": {"id": "0fdc1539-eb35-4283-99e8-fcc5a7a64110", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2082542751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ae04936c9b4ea19b5d7fac78c96ba4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34b38d14-c4", "ovs_interfaceid": "34b38d14-c4de-4ea8-ade4-66790a305b89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2047.754771] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:18:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34b38d14-c4de-4ea8-ade4-66790a305b89', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2047.762247] env[62619]: DEBUG oslo.service.loopingcall [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2047.762443] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2047.762659] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75b94fe5-1936-41b2-a647-db7f66a049d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.782527] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2047.782527] env[62619]: value = "task-1778737" [ 2047.782527] env[62619]: _type = "Task" [ 2047.782527] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.790149] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778737, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.014511] env[62619]: DEBUG nova.compute.manager [req-0e5197f9-397d-480a-a367-31fe11ccb957 req-24efd532-556b-4a4c-91a3-dc6799d360bf service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Received event network-changed-34b38d14-c4de-4ea8-ade4-66790a305b89 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2048.014786] env[62619]: DEBUG nova.compute.manager [req-0e5197f9-397d-480a-a367-31fe11ccb957 req-24efd532-556b-4a4c-91a3-dc6799d360bf service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Refreshing instance network info cache due to event network-changed-34b38d14-c4de-4ea8-ade4-66790a305b89. 
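The req-* entries here are Neutron-originated external events (network-vif-plugged, network-changed) delivered to the compute manager, and the earlier WARNING about an unexpected network-vif-plugged event simply means the event arrived before the spawning thread had registered a waiter for it. A toy sketch of that register-then-notify pattern, assuming plain threading primitives; Nova's InstanceEvents class is more elaborate:

```python
import threading

class EventWaiter:
    """Toy prepare/notify pattern; an event with no registered waiter is 'unexpected'."""

    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        with self._lock:
            ev = threading.Event()
            self._events[(instance_uuid, event_name)] = ev
            return ev

    def notify(self, instance_uuid, event_name):
        with self._lock:
            ev = self._events.pop((instance_uuid, event_name), None)
        if ev is None:
            print(f"WARNING: received unexpected event {event_name} for {instance_uuid}")
        else:
            ev.set()

waiter = EventWaiter()
# As in the log: the plug event lands before anyone is waiting for it.
waiter.notify("7b29140c-320e-4497-b724-2587e2ff5793",
              "network-vif-plugged-34b38d14-c4de-4ea8-ade4-66790a305b89")
```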
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2048.014977] env[62619]: DEBUG oslo_concurrency.lockutils [req-0e5197f9-397d-480a-a367-31fe11ccb957 req-24efd532-556b-4a4c-91a3-dc6799d360bf service nova] Acquiring lock "refresh_cache-7b29140c-320e-4497-b724-2587e2ff5793" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2048.015122] env[62619]: DEBUG oslo_concurrency.lockutils [req-0e5197f9-397d-480a-a367-31fe11ccb957 req-24efd532-556b-4a4c-91a3-dc6799d360bf service nova] Acquired lock "refresh_cache-7b29140c-320e-4497-b724-2587e2ff5793" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2048.015297] env[62619]: DEBUG nova.network.neutron [req-0e5197f9-397d-480a-a367-31fe11ccb957 req-24efd532-556b-4a4c-91a3-dc6799d360bf service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Refreshing network info cache for port 34b38d14-c4de-4ea8-ade4-66790a305b89 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2048.094712] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2048.094928] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2048.095182] env[62619]: DEBUG oslo_concurrency.lockutils [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2048.095336] env[62619]: DEBUG oslo_concurrency.lockutils [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2048.095514] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2048.095770] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46a6e9be-86c3-4cb6-8e4e-52c24d56b4c2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.106666] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2048.106885] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2048.107684] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdb45a9f-95ca-4ca5-9b97-425babcb59e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.113178] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2048.113178] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ed1ce3-ca8e-4aa7-feb9-84fcccd02a89" [ 2048.113178] env[62619]: _type = "Task" [ 2048.113178] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.121156] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ed1ce3-ca8e-4aa7-feb9-84fcccd02a89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.292423] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778737, 'name': CreateVM_Task, 'duration_secs': 0.451491} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.292609] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2048.293287] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2048.293447] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2048.293765] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2048.294037] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fc3b70a-9af3-4cc4-a95e-df34e5e2e6b9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.299429] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2048.299429] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52175d52-c840-a22f-be97-4f417520b1db" [ 2048.299429] env[62619]: _type = "Task" [ 2048.299429] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.308841] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52175d52-c840-a22f-be97-4f417520b1db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.625097] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ed1ce3-ca8e-4aa7-feb9-84fcccd02a89, 'name': SearchDatastore_Task, 'duration_secs': 0.008993} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.625857] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04e10f01-3426-498c-b085-552b4e6503ed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.631325] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2048.631325] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5286fa19-ba35-51ec-64e1-50ca7a4d583b" [ 2048.631325] env[62619]: _type = "Task" [ 2048.631325] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.638817] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5286fa19-ba35-51ec-64e1-50ca7a4d583b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.712776] env[62619]: DEBUG nova.network.neutron [req-0e5197f9-397d-480a-a367-31fe11ccb957 req-24efd532-556b-4a4c-91a3-dc6799d360bf service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Updated VIF entry in instance network info cache for port 34b38d14-c4de-4ea8-ade4-66790a305b89. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2048.713147] env[62619]: DEBUG nova.network.neutron [req-0e5197f9-397d-480a-a367-31fe11ccb957 req-24efd532-556b-4a4c-91a3-dc6799d360bf service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Updating instance_info_cache with network_info: [{"id": "34b38d14-c4de-4ea8-ade4-66790a305b89", "address": "fa:16:3e:f2:18:fc", "network": {"id": "0fdc1539-eb35-4283-99e8-fcc5a7a64110", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2082542751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ae04936c9b4ea19b5d7fac78c96ba4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34b38d14-c4", "ovs_interfaceid": "34b38d14-c4de-4ea8-ade4-66790a305b89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2048.810562] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52175d52-c840-a22f-be97-4f417520b1db, 'name': 
SearchDatastore_Task, 'duration_secs': 0.01133} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.810936] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2048.811262] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2048.811557] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2049.145157] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5286fa19-ba35-51ec-64e1-50ca7a4d583b, 'name': SearchDatastore_Task, 'duration_secs': 0.012451} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.145531] env[62619]: DEBUG oslo_concurrency.lockutils [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2049.145791] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8a780b5e-1767-47c3-99f7-4844e7df5346/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk. 
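The rescue copy above follows a fixed naming convention: the cached image vmdk lives under devstack-image-cache_base/<image-id>/ and the rescue copy lands in the instance folder as <image-id>-rescue.vmdk. A small sketch of those datastore path strings, using only the ids visible in the log:

```python
def cached_image_path(datastore, image_id):
    # "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk"
    return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

def rescue_disk_path(datastore, instance_uuid, image_id):
    # "[datastore1] <instance>/<image>-rescue.vmdk"
    return f"[{datastore}] {instance_uuid}/{image_id}-rescue.vmdk"

image = "27a858d5-7985-4b17-8b01-50adcd8f566c"
print(cached_image_path("datastore1", image))
print(rescue_disk_path("datastore1", "8a780b5e-1767-47c3-99f7-4844e7df5346", image))
```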
{{(pid=62619) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2049.146203] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2049.146467] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2049.146746] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f05b5a1-f006-49d0-821e-3a47a46a2a2d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.149094] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef63bb2c-7aed-4395-b9a5-a42dd86ceff7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.156152] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2049.156152] env[62619]: value = "task-1778738" [ 2049.156152] env[62619]: _type = "Task" [ 2049.156152] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.160222] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2049.160388] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2049.161367] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d8d2483-a209-4638-8c8f-26cc9d1cdb72 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.166241] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778738, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.169297] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2049.169297] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525dd762-44fb-3697-2d0e-289f1108b850" [ 2049.169297] env[62619]: _type = "Task" [ 2049.169297] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.176130] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525dd762-44fb-3697-2d0e-289f1108b850, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.215956] env[62619]: DEBUG oslo_concurrency.lockutils [req-0e5197f9-397d-480a-a367-31fe11ccb957 req-24efd532-556b-4a4c-91a3-dc6799d360bf service nova] Releasing lock "refresh_cache-7b29140c-320e-4497-b724-2587e2ff5793" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2049.666396] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778738, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467032} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.666688] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 8a780b5e-1767-47c3-99f7-4844e7df5346/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk. [ 2049.668600] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfdc590-6b7f-4683-b105-7670de814c9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.686756] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]525dd762-44fb-3697-2d0e-289f1108b850, 'name': SearchDatastore_Task, 'duration_secs': 0.017451} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.727597] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 8a780b5e-1767-47c3-99f7-4844e7df5346/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2049.727890] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a453fb3b-7748-4f4d-9f9a-84202220b8de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.730343] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c760b350-5a0d-4d7d-a94d-47b14392e9c0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.746605] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2049.746605] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52258dcd-0a91-4116-f99a-7c716afc7657" [ 2049.746605] env[62619]: _type = "Task" [ 2049.746605] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.750963] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2049.750963] env[62619]: value = "task-1778739" [ 2049.750963] env[62619]: _type = "Task" [ 2049.750963] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.758144] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52258dcd-0a91-4116-f99a-7c716afc7657, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.762953] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778739, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.258102] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52258dcd-0a91-4116-f99a-7c716afc7657, 'name': SearchDatastore_Task, 'duration_secs': 0.021795} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2050.258665] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2050.258918] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 7b29140c-320e-4497-b724-2587e2ff5793/7b29140c-320e-4497-b724-2587e2ff5793.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2050.259159] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a704bde8-9219-44dc-9f31-94b6effc9b98 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.263583] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778739, 'name': ReconfigVM_Task, 'duration_secs': 0.330737} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2050.264101] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 8a780b5e-1767-47c3-99f7-4844e7df5346/27a858d5-7985-4b17-8b01-50adcd8f566c-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2050.264852] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810acc18-4297-4ce2-9fc0-5a213d9810fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.268254] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2050.268254] env[62619]: value = "task-1778740" [ 2050.268254] env[62619]: _type = "Task" [ 2050.268254] env[62619]: } to complete. 
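The Acquiring/Acquired/Releasing lock lines around the cached vmdk are oslo.concurrency lockutils locks: both builds read the same devstack-image-cache_base image, so access to it is serialized by name. A minimal sketch of that pattern, assuming oslo.concurrency is installed and using lockutils.lock as an in-process context manager:

```python
from oslo_concurrency import lockutils

CACHE_VMDK = ("[datastore1] devstack-image-cache_base/"
              "27a858d5-7985-4b17-8b01-50adcd8f566c/"
              "27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk")

def copy_from_cache(copy_fn):
    # One caller at a time may work on the cached disk, as in the log's
    # acquire/release pairs around the CopyVirtualDisk calls.
    with lockutils.lock(CACHE_VMDK):
        return copy_fn(CACHE_VMDK)

print(copy_from_cache(lambda src: f"copied {src}"))
```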
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2050.291116] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04b8ae71-d8b2-48c3-ac94-a4dc2450fc87 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.304012] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778740, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.308981] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2050.308981] env[62619]: value = "task-1778741" [ 2050.308981] env[62619]: _type = "Task" [ 2050.308981] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2050.316345] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778741, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.777928] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778740, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487769} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2050.778218] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 7b29140c-320e-4497-b724-2587e2ff5793/7b29140c-320e-4497-b724-2587e2ff5793.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2050.778453] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2050.778717] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f248cc85-82ea-4efc-9d4a-76f3e33d74df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.785640] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2050.785640] env[62619]: value = "task-1778742" [ 2050.785640] env[62619]: _type = "Task" [ 2050.785640] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2050.792595] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778742, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.818050] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778741, 'name': ReconfigVM_Task, 'duration_secs': 0.1443} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2050.818050] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2050.818272] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bbc7fd37-8eeb-43bb-9d25-8fdea826380e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.823608] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2050.823608] env[62619]: value = "task-1778743" [ 2050.823608] env[62619]: _type = "Task" [ 2050.823608] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2050.830663] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778743, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.295124] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778742, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080661} completed successfully. 
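The "Extending root virtual disk to 1048576" value a few entries back is simply the m1.nano flavor's root_gb=1 expressed in KB, which is the unit the extend task is given here:

```python
# root_gb from the flavor, converted to the KB figure seen in the log.
root_gb = 1                       # m1.nano in this run
requested_size_kb = root_gb * 1024 * 1024
print(requested_size_kb)          # 1048576
```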
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.295523] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2051.296161] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad2e8d5-f828-479a-93ab-89343473bd7b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.317252] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 7b29140c-320e-4497-b724-2587e2ff5793/7b29140c-320e-4497-b724-2587e2ff5793.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2051.317481] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee0b6452-e688-44a9-bf02-663122a0abb1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.344410] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778743, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.345604] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2051.345604] env[62619]: value = "task-1778744" [ 2051.345604] env[62619]: _type = "Task" [ 2051.345604] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.352747] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778744, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.845134] env[62619]: DEBUG oslo_vmware.api [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778743, 'name': PowerOnVM_Task, 'duration_secs': 0.543668} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.845451] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2051.850540] env[62619]: DEBUG nova.compute.manager [None req-79c7bf36-7592-4cc3-8de7-6575ab776f98 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2051.851260] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b352f14-a273-49c9-aff6-e546915c704e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.861230] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778744, 'name': ReconfigVM_Task, 'duration_secs': 0.345182} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.861607] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 7b29140c-320e-4497-b724-2587e2ff5793/7b29140c-320e-4497-b724-2587e2ff5793.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2051.862182] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e428ce6-f9bb-489c-a39a-8fe200be71a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.867962] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2051.867962] env[62619]: value = "task-1778745" [ 2051.867962] env[62619]: _type = "Task" [ 2051.867962] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.874989] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778745, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.377192] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778745, 'name': Rename_Task, 'duration_secs': 0.185659} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.377535] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2052.377694] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df8d333f-9670-4911-9c6a-f373bdef9ef3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.384040] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2052.384040] env[62619]: value = "task-1778746" [ 2052.384040] env[62619]: _type = "Task" [ 2052.384040] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.391426] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778746, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.894729] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778746, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.303566] env[62619]: INFO nova.compute.manager [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Unrescuing [ 2053.303865] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "refresh_cache-8a780b5e-1767-47c3-99f7-4844e7df5346" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2053.304037] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquired lock "refresh_cache-8a780b5e-1767-47c3-99f7-4844e7df5346" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2053.304211] env[62619]: DEBUG nova.network.neutron [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2053.395432] env[62619]: DEBUG oslo_vmware.api [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778746, 'name': PowerOnVM_Task, 'duration_secs': 0.585205} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2053.395834] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2053.395996] env[62619]: INFO nova.compute.manager [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Took 7.59 seconds to spawn the instance on the hypervisor. [ 2053.396232] env[62619]: DEBUG nova.compute.manager [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2053.397089] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403fdfb3-7253-4b1f-b113-5d78a5afcf56 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.912483] env[62619]: INFO nova.compute.manager [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Took 12.30 seconds to build instance. 
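The entries above trace the spawn of instance 7b29140c-320e-4497-b724-2587e2ff5793: each vCenter operation (ReconfigVM_Task to attach the disk, Rename_Task, PowerOnVM_Task) is submitted and then polled via wait_for_task until it reports "completed successfully", with intermediate progress lines in between. Below is a minimal, self-contained sketch of that submit-and-poll shape; TaskStub and poll_task are illustrative stand-ins, not the oslo_vmware implementation.

```python
# Illustrative sketch of the submit-and-poll pattern seen above for
# ReconfigVM_Task / Rename_Task / PowerOnVM_Task. TaskStub and poll_task
# are hypothetical stand-ins, not the oslo_vmware API.
import itertools
import time


class TaskStub:
    """Fake VMware task that reports progress, then success."""

    def __init__(self, name):
        self.name = name
        self._states = itertools.chain(
            [("running", 0), ("running", 89)],
            itertools.repeat(("success", 100)),
        )

    def info(self):
        state, progress = next(self._states)
        return {"state": state, "progress": progress}


def poll_task(task, interval=0.5):
    """Poll until the task reaches a terminal state, like the _poll_task lines above."""
    while True:
        info = task.info()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(f"{task.name} failed")
        print(f"Task {task.name} progress is {info['progress']}%")
        time.sleep(interval)


if __name__ == "__main__":
    print(poll_task(TaskStub("PowerOnVM_Task"), interval=0.1))
```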
[ 2053.999127] env[62619]: DEBUG nova.network.neutron [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Updating instance_info_cache with network_info: [{"id": "cb8fcd68-9c4f-498a-9161-617782b4cac3", "address": "fa:16:3e:bd:6c:87", "network": {"id": "50c6d508-1bcb-4435-b59e-4994f7ce32c0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1862625551-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3f078f46880b423dae58d87e31f291d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb8fcd68-9c", "ovs_interfaceid": "cb8fcd68-9c4f-498a-9161-617782b4cac3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2054.414890] env[62619]: DEBUG oslo_concurrency.lockutils [None req-da52f4ce-941b-4abb-8448-acbd2664d3bc tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "7b29140c-320e-4497-b724-2587e2ff5793" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.814s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2054.503792] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Releasing lock "refresh_cache-8a780b5e-1767-47c3-99f7-4844e7df5346" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2054.504478] env[62619]: DEBUG nova.objects.instance [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lazy-loading 'flavor' on Instance uuid 8a780b5e-1767-47c3-99f7-4844e7df5346 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2054.664063] env[62619]: DEBUG nova.compute.manager [req-1e093d48-3f63-4321-8899-a86e745d6aa8 req-a2ae888e-80b1-48f6-ab53-b9c04f8637da service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Received event network-changed-34b38d14-c4de-4ea8-ade4-66790a305b89 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2054.664349] env[62619]: DEBUG nova.compute.manager [req-1e093d48-3f63-4321-8899-a86e745d6aa8 req-a2ae888e-80b1-48f6-ab53-b9c04f8637da service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Refreshing instance network info cache due to event network-changed-34b38d14-c4de-4ea8-ade4-66790a305b89. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2054.664602] env[62619]: DEBUG oslo_concurrency.lockutils [req-1e093d48-3f63-4321-8899-a86e745d6aa8 req-a2ae888e-80b1-48f6-ab53-b9c04f8637da service nova] Acquiring lock "refresh_cache-7b29140c-320e-4497-b724-2587e2ff5793" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2054.664771] env[62619]: DEBUG oslo_concurrency.lockutils [req-1e093d48-3f63-4321-8899-a86e745d6aa8 req-a2ae888e-80b1-48f6-ab53-b9c04f8637da service nova] Acquired lock "refresh_cache-7b29140c-320e-4497-b724-2587e2ff5793" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2054.664982] env[62619]: DEBUG nova.network.neutron [req-1e093d48-3f63-4321-8899-a86e745d6aa8 req-a2ae888e-80b1-48f6-ab53-b9c04f8637da service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Refreshing network info cache for port 34b38d14-c4de-4ea8-ade4-66790a305b89 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2055.010411] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d0a251-9b1f-49ec-bf31-99132fa953ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.031893] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2055.032245] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dab29c13-6ac9-4f48-a489-9ced3a6b8b68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.038870] env[62619]: DEBUG oslo_vmware.api [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2055.038870] env[62619]: value = "task-1778747" [ 2055.038870] env[62619]: _type = "Task" [ 2055.038870] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.047038] env[62619]: DEBUG oslo_vmware.api [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778747, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.383352] env[62619]: DEBUG nova.network.neutron [req-1e093d48-3f63-4321-8899-a86e745d6aa8 req-a2ae888e-80b1-48f6-ab53-b9c04f8637da service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Updated VIF entry in instance network info cache for port 34b38d14-c4de-4ea8-ade4-66790a305b89. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2055.383765] env[62619]: DEBUG nova.network.neutron [req-1e093d48-3f63-4321-8899-a86e745d6aa8 req-a2ae888e-80b1-48f6-ab53-b9c04f8637da service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Updating instance_info_cache with network_info: [{"id": "34b38d14-c4de-4ea8-ade4-66790a305b89", "address": "fa:16:3e:f2:18:fc", "network": {"id": "0fdc1539-eb35-4283-99e8-fcc5a7a64110", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2082542751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ae04936c9b4ea19b5d7fac78c96ba4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34b38d14-c4", "ovs_interfaceid": "34b38d14-c4de-4ea8-ade4-66790a305b89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2055.548474] env[62619]: DEBUG oslo_vmware.api [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778747, 'name': PowerOffVM_Task, 'duration_secs': 0.206779} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.548867] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2055.553944] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Reconfiguring VM instance instance-00000078 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2055.554230] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fcdfd2f1-d6b1-487a-9b56-37f505e6340d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.572368] env[62619]: DEBUG oslo_vmware.api [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2055.572368] env[62619]: value = "task-1778748" [ 2055.572368] env[62619]: _type = "Task" [ 2055.572368] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.580234] env[62619]: DEBUG oslo_vmware.api [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778748, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.886661] env[62619]: DEBUG oslo_concurrency.lockutils [req-1e093d48-3f63-4321-8899-a86e745d6aa8 req-a2ae888e-80b1-48f6-ab53-b9c04f8637da service nova] Releasing lock "refresh_cache-7b29140c-320e-4497-b724-2587e2ff5793" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2056.082687] env[62619]: DEBUG oslo_vmware.api [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778748, 'name': ReconfigVM_Task, 'duration_secs': 0.298} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.083079] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Reconfigured VM instance instance-00000078 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2056.083340] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2056.083624] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00a72248-3f47-4fad-bae7-1d3895d0e13c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.090889] env[62619]: DEBUG oslo_vmware.api [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2056.090889] env[62619]: value = "task-1778749" [ 2056.090889] env[62619]: _type = "Task" [ 2056.090889] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.099265] env[62619]: DEBUG oslo_vmware.api [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778749, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.600490] env[62619]: DEBUG oslo_vmware.api [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778749, 'name': PowerOnVM_Task, 'duration_secs': 0.362192} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.602073] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2056.602073] env[62619]: DEBUG nova.compute.manager [None req-d477c7e0-89fe-4fd0-963f-ce524b18b577 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2056.602317] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d96f0d7-0709-4f04-93e6-8fe24c294a92 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.875534] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "8a780b5e-1767-47c3-99f7-4844e7df5346" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.875909] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "8a780b5e-1767-47c3-99f7-4844e7df5346" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.876066] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "8a780b5e-1767-47c3-99f7-4844e7df5346-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.876203] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "8a780b5e-1767-47c3-99f7-4844e7df5346-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.876338] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "8a780b5e-1767-47c3-99f7-4844e7df5346-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2057.878390] env[62619]: INFO nova.compute.manager [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] 
[instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Terminating instance [ 2058.382158] env[62619]: DEBUG nova.compute.manager [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2058.382380] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2058.383289] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b4f5ce-112f-4ad9-beec-c229c41ddbd3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.391198] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2058.391423] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bcb53f0-3634-41b7-bc96-ad12e401f1e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.396650] env[62619]: DEBUG oslo_vmware.api [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2058.396650] env[62619]: value = "task-1778750" [ 2058.396650] env[62619]: _type = "Task" [ 2058.396650] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.405533] env[62619]: DEBUG oslo_vmware.api [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778750, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.906268] env[62619]: DEBUG oslo_vmware.api [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778750, 'name': PowerOffVM_Task, 'duration_secs': 0.313651} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.906625] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2058.906688] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2058.906907] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aae841f0-2ddd-4fb6-9925-49baa686edc2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.980231] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2058.980409] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2058.980592] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Deleting the datastore file [datastore1] 8a780b5e-1767-47c3-99f7-4844e7df5346 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2058.980867] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4769756-021a-4741-a877-ca3311476ce0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.987756] env[62619]: DEBUG oslo_vmware.api [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2058.987756] env[62619]: value = "task-1778752" [ 2058.987756] env[62619]: _type = "Task" [ 2058.987756] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.995469] env[62619]: DEBUG oslo_vmware.api [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778752, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.497686] env[62619]: DEBUG oslo_vmware.api [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778752, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139013} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.497930] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2059.498121] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2059.498299] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2059.498467] env[62619]: INFO nova.compute.manager [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2059.498702] env[62619]: DEBUG oslo.service.loopingcall [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2059.498892] env[62619]: DEBUG nova.compute.manager [-] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2059.498984] env[62619]: DEBUG nova.network.neutron [-] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2059.709722] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2059.709722] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 2059.709868] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2059.709997] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Cleaning up deleted instances with incomplete migration {{(pid=62619) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11647}} [ 2059.754063] env[62619]: DEBUG nova.compute.manager [req-1566f908-89ee-45c6-be9f-b02f3a861578 req-9f0c5a3c-c9be-413c-ad52-9eb5ef6ce9ea service nova] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Received event network-vif-deleted-cb8fcd68-9c4f-498a-9161-617782b4cac3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2059.754278] env[62619]: INFO nova.compute.manager [req-1566f908-89ee-45c6-be9f-b02f3a861578 req-9f0c5a3c-c9be-413c-ad52-9eb5ef6ce9ea service nova] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Neutron deleted interface cb8fcd68-9c4f-498a-9161-617782b4cac3; detaching it from the instance and deleting it from the info cache [ 2059.754447] env[62619]: DEBUG nova.network.neutron [req-1566f908-89ee-45c6-be9f-b02f3a861578 req-9f0c5a3c-c9be-413c-ad52-9eb5ef6ce9ea service nova] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2060.235514] env[62619]: DEBUG nova.network.neutron [-] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2060.256786] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-caadc5bc-7590-4a45-844e-16b5cd8177fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.266534] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a275a95d-24ef-457d-8dca-55a9ba528310 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.295091] env[62619]: DEBUG nova.compute.manager [req-1566f908-89ee-45c6-be9f-b02f3a861578 req-9f0c5a3c-c9be-413c-ad52-9eb5ef6ce9ea service nova] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Detach interface failed, port_id=cb8fcd68-9c4f-498a-9161-617782b4cac3, reason: Instance 8a780b5e-1767-47c3-99f7-4844e7df5346 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2060.736655] env[62619]: INFO nova.compute.manager [-] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Took 1.24 seconds to deallocate network for instance. 
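The unrescue and terminate sequences above are bracketed by oslo.concurrency locks: "refresh_cache-<uuid>" around the network info cache rebuild, then the per-instance lock "8a780b5e-1767-47c3-99f7-4844e7df5346" and its "-events" companion around the teardown, each logged as acquiring / acquired (waited) / released (held). A small sketch of that bracketing follows, assuming the oslo.concurrency package is installed; the lock names mirror the log, but the function bodies are placeholders, not Nova code.

```python
# Minimal sketch of the lock bracketing logged above ("Acquiring lock ...",
# "acquired ... waited 0.000s", "released ... held N s"), assuming
# oslo.concurrency is installed. Bodies are placeholders, not Nova code.
import time

from oslo_concurrency import lockutils

INSTANCE_UUID = "8a780b5e-1767-47c3-99f7-4844e7df5346"


@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    # Stand-in for the per-instance lock held for the whole teardown
    # (do_terminate_instance in the log).
    with lockutils.lock(f"{INSTANCE_UUID}-events"):
        pass  # placeholder for clearing pending instance events
    time.sleep(0.1)  # placeholder for the hypervisor destroy


def refresh_network_cache():
    # Equivalent of Acquired/Releasing lock "refresh_cache-<uuid>" around
    # the instance network info cache rebuild during unrescue.
    with lockutils.lock(f"refresh_cache-{INSTANCE_UUID}"):
        time.sleep(0.1)  # placeholder for the Neutron cache refresh


if __name__ == "__main__":
    refresh_network_cache()
    do_terminate_instance()
```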
[ 2061.243663] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.244016] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.244195] env[62619]: DEBUG nova.objects.instance [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lazy-loading 'resources' on Instance uuid 8a780b5e-1767-47c3-99f7-4844e7df5346 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2061.844684] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9622ec19-515a-42bf-9513-439d8ac30ed9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.853039] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde2c020-4157-4a0e-893e-56822ff441e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.887272] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff56016c-a213-4336-b7ce-e24d4150faa5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.895041] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a1c744-62da-4725-a97d-1e21ce47ba10 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.910523] env[62619]: DEBUG nova.compute.provider_tree [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2062.414448] env[62619]: DEBUG nova.scheduler.client.report [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2062.919893] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 
tempest-ServerRescueTestJSON-781839232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.676s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2062.938108] env[62619]: INFO nova.scheduler.client.report [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Deleted allocations for instance 8a780b5e-1767-47c3-99f7-4844e7df5346 [ 2063.445245] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f6d1873a-7eb4-4e3e-b4df-bdc72fc24d26 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "8a780b5e-1767-47c3-99f7-4844e7df5346" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.569s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.211506] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2064.211752] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2064.473913] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.474326] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.474461] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.474558] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.474722] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 
tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.476818] env[62619]: INFO nova.compute.manager [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Terminating instance [ 2064.709658] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2064.980534] env[62619]: DEBUG nova.compute.manager [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2064.980750] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2064.981678] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1970e72-5b91-4fe1-9ee3-c17d28c1794a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.989503] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2064.989729] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-afb3f3da-5f3b-4d4c-8119-0db368e33fe6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.997070] env[62619]: DEBUG oslo_vmware.api [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2064.997070] env[62619]: value = "task-1778753" [ 2064.997070] env[62619]: _type = "Task" [ 2064.997070] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.005850] env[62619]: DEBUG oslo_vmware.api [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778753, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.460662] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "0cde512e-a9f8-4f9b-9e08-41494090e314" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.460956] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "0cde512e-a9f8-4f9b-9e08-41494090e314" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.461095] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "0cde512e-a9f8-4f9b-9e08-41494090e314-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.461277] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "0cde512e-a9f8-4f9b-9e08-41494090e314-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.461445] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "0cde512e-a9f8-4f9b-9e08-41494090e314-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.463662] env[62619]: INFO nova.compute.manager [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Terminating instance [ 2065.506170] env[62619]: DEBUG oslo_vmware.api [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778753, 'name': PowerOffVM_Task, 'duration_secs': 0.193834} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.506485] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2065.506574] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2065.506778] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-345c62e0-2f21-4475-a077-5d0bf7477730 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.584895] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2065.585144] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2065.585309] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Deleting the datastore file [datastore1] dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2065.585566] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1975858f-89d8-4a26-afcc-c86e3d1d5ffc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.592859] env[62619]: DEBUG oslo_vmware.api [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for the task: (returnval){ [ 2065.592859] env[62619]: value = "task-1778755" [ 2065.592859] env[62619]: _type = "Task" [ 2065.592859] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.600118] env[62619]: DEBUG oslo_vmware.api [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778755, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.967369] env[62619]: DEBUG nova.compute.manager [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2065.967635] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2065.968562] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6148bd71-5e0c-4e70-bad1-c5fdb4ba9dc3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.976407] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2065.976648] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1aefdfe4-9179-4aff-8333-022cd6611c95 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.982695] env[62619]: DEBUG oslo_vmware.api [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2065.982695] env[62619]: value = "task-1778756" [ 2065.982695] env[62619]: _type = "Task" [ 2065.982695] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.990311] env[62619]: DEBUG oslo_vmware.api [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778756, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.102627] env[62619]: DEBUG oslo_vmware.api [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Task: {'id': task-1778755, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191732} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.103108] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2066.103375] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2066.103615] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2066.103847] env[62619]: INFO nova.compute.manager [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2066.104478] env[62619]: DEBUG oslo.service.loopingcall [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2066.104478] env[62619]: DEBUG nova.compute.manager [-] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2066.104478] env[62619]: DEBUG nova.network.neutron [-] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2066.376578] env[62619]: DEBUG nova.compute.manager [req-19694f76-d8f7-44ce-9d9c-930201810450 req-81a67c33-7d38-4084-af60-87d79deaa10c service nova] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Received event network-vif-deleted-5186dbd9-28b8-4b21-91a1-4cf5100781e6 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2066.376791] env[62619]: INFO nova.compute.manager [req-19694f76-d8f7-44ce-9d9c-930201810450 req-81a67c33-7d38-4084-af60-87d79deaa10c service nova] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Neutron deleted interface 5186dbd9-28b8-4b21-91a1-4cf5100781e6; detaching it from the instance and deleting it from the info cache [ 2066.377030] env[62619]: DEBUG nova.network.neutron [req-19694f76-d8f7-44ce-9d9c-930201810450 req-81a67c33-7d38-4084-af60-87d79deaa10c service nova] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2066.492774] env[62619]: DEBUG oslo_vmware.api [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': 
task-1778756, 'name': PowerOffVM_Task, 'duration_secs': 0.262352} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.493104] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2066.493315] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2066.493561] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a3148d2-04bf-4bc3-a372-4211952d70bb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.589605] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2066.590023] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2066.590023] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Deleting the datastore file [datastore1] 0cde512e-a9f8-4f9b-9e08-41494090e314 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2066.590201] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-772503ee-6531-41be-a7b0-b014848aaf58 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.598479] env[62619]: DEBUG oslo_vmware.api [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2066.598479] env[62619]: value = "task-1778758" [ 2066.598479] env[62619]: _type = "Task" [ 2066.598479] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.606184] env[62619]: DEBUG oslo_vmware.api [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778758, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.708821] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2066.709049] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 2066.854842] env[62619]: DEBUG nova.network.neutron [-] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2066.879053] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d5248e1-8984-4a18-a9e8-e64383c67a68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.888681] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f657b73-39ce-48f8-892f-970cc76e342f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.916353] env[62619]: DEBUG nova.compute.manager [req-19694f76-d8f7-44ce-9d9c-930201810450 req-81a67c33-7d38-4084-af60-87d79deaa10c service nova] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Detach interface failed, port_id=5186dbd9-28b8-4b21-91a1-4cf5100781e6, reason: Instance dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2067.107937] env[62619]: DEBUG oslo_vmware.api [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778758, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160937} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.108196] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2067.108305] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2067.108479] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2067.108647] env[62619]: INFO nova.compute.manager [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Took 1.14 seconds to destroy the instance on the hypervisor. [ 2067.108880] env[62619]: DEBUG oslo.service.loopingcall [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2067.109109] env[62619]: DEBUG nova.compute.manager [-] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2067.109206] env[62619]: DEBUG nova.network.neutron [-] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2067.357430] env[62619]: INFO nova.compute.manager [-] [instance: dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Took 1.25 seconds to deallocate network for instance. [ 2067.715170] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Skipping network cache update for instance because it is being deleted. 
{{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10358}} [ 2067.865031] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2067.865150] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.865400] env[62619]: DEBUG nova.objects.instance [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lazy-loading 'resources' on Instance uuid dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2068.030155] env[62619]: DEBUG nova.network.neutron [-] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.247522] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "refresh_cache-e599f5ad-1b4d-4912-9b96-2544c52b0acf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2068.247681] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquired lock "refresh_cache-e599f5ad-1b4d-4912-9b96-2544c52b0acf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2068.247827] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Forcefully refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2068.402221] env[62619]: DEBUG nova.compute.manager [req-ff872211-f2e6-4e48-a41e-b12d877c6a98 req-507aeb81-fc63-4e14-9d7b-17b696e416e0 service nova] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Received event network-vif-deleted-9db75e6b-9462-4d5f-8939-a281ea75ea84 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2068.454738] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14c8924-3032-4fc7-8505-aff80c5a1faf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.462475] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59dfdc48-bd38-4c5c-b266-909e0819fd9f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.492625] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac34f5c3-5006-46fb-a409-888b068f12a3 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.499348] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97dfe2c0-b0e8-41f1-8b76-64ee3cb86840 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.511856] env[62619]: DEBUG nova.compute.provider_tree [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2068.533143] env[62619]: INFO nova.compute.manager [-] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Took 1.42 seconds to deallocate network for instance. [ 2069.015064] env[62619]: DEBUG nova.scheduler.client.report [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2069.037960] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2069.463847] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Updating instance_info_cache with network_info: [{"id": "4e33b817-e043-4b75-92ae-40c8132fcc06", "address": "fa:16:3e:bf:96:2b", "network": {"id": "0fdc1539-eb35-4283-99e8-fcc5a7a64110", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2082542751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ae04936c9b4ea19b5d7fac78c96ba4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e33b817-e0", "ovs_interfaceid": "4e33b817-e043-4b75-92ae-40c8132fcc06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2069.519604] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.654s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.522890] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.484s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.522890] env[62619]: DEBUG nova.objects.instance [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lazy-loading 'resources' on Instance uuid 0cde512e-a9f8-4f9b-9e08-41494090e314 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2069.542535] env[62619]: INFO nova.scheduler.client.report [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Deleted allocations for instance dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc [ 2069.966395] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Releasing lock "refresh_cache-e599f5ad-1b4d-4912-9b96-2544c52b0acf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2069.966601] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 2069.966802] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2069.966966] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2069.967154] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2070.051952] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ca91e3ca-b65b-41e8-ac33-5e0f263b9a14 tempest-ServerRescueTestJSON-781839232 tempest-ServerRescueTestJSON-781839232-project-member] Lock "dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.578s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2070.091549] env[62619]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f76297c-2922-49cf-8d7e-dc0bd6d2291c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.099199] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e91950-239d-4a51-949f-d12b86494d4d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.129408] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18a0df8-6296-4e4d-bce7-86609934041d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.136656] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9add845-397e-4259-99bb-b1120da0fb0d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.150983] env[62619]: DEBUG nova.compute.provider_tree [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2070.469700] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2070.654459] env[62619]: DEBUG nova.scheduler.client.report [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2071.160058] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.638s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2071.162440] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.693s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2071.162618] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2071.162768] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2071.163654] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5c300e-f5cd-4ec9-a7a3-e56390e04ba6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.172081] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6853423c-445c-4f7b-8463-6f4db2d6c789 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.188260] env[62619]: INFO nova.scheduler.client.report [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Deleted allocations for instance 0cde512e-a9f8-4f9b-9e08-41494090e314 [ 2071.189676] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5095afcd-8656-4f95-a667-2e360f5d57e8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.198482] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b4a06b-157a-4143-8051-9386dffcf773 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.229749] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180352MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2071.229911] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2071.230167] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2071.698271] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6516ec2d-1a5d-43c5-aa1f-a0eee84fc7b8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "0cde512e-a9f8-4f9b-9e08-41494090e314" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.237s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.259241] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 4c66bbdf-af6a-4705-8219-85cf19f8314e actively managed on this compute host and has allocations in placement: {'resources': 
{'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.259505] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e599f5ad-1b4d-4912-9b96-2544c52b0acf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.259549] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 424da668-f458-44a6-9b38-e6c5db352a01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.259647] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 7b29140c-320e-4497-b724-2587e2ff5793 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.259823] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2072.259957] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2072.327569] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3080a33-1511-4675-913a-e3008b407b7f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.335684] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7ac68b-9c8f-4e1e-bd29-0b0344f14dea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.365845] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c66f3c-9bbb-485d-bb4a-2371c4aa89c0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.374027] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a0027e-5db4-4370-a384-d891ef8a2b09 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.389039] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2072.892013] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] 
Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2073.397172] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2073.397530] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.167s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2073.397580] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2073.397718] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Cleaning up deleted instances {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11609}} [ 2073.710479] env[62619]: DEBUG nova.compute.manager [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Stashing vm_state: active {{(pid=62619) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 2073.909097] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] There are 39 instances to clean {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11618}} [ 2073.909282] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 8a780b5e-1767-47c3-99f7-4844e7df5346] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2074.233705] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2074.233958] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2074.412450] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 
dff65fe6-9a61-412f-9d5c-ab2b2f34e0fc] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2074.738611] env[62619]: INFO nova.compute.claims [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2074.916033] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 0cde512e-a9f8-4f9b-9e08-41494090e314] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2075.244356] env[62619]: INFO nova.compute.resource_tracker [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating resource usage from migration 5c54decc-fb88-4aa1-85fc-6e1ff999b2e9 [ 2075.309448] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a295a717-f52a-4250-afb9-a50bb5ce9d26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.317920] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e54e0c8-0ed0-4eb1-9e30-1b612ca6714b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.346630] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef47ef35-8971-447d-93ce-7339c994b7e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.353077] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782eedb3-be64-4f2e-b9fa-50d77c1cb983 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.365424] env[62619]: DEBUG nova.compute.provider_tree [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2075.418897] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 314758ce-6522-47cf-8445-0d28b1b085b9] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2075.868668] env[62619]: DEBUG nova.scheduler.client.report [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2075.922295] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 9e69e4ab-f219-4ba8-8f66-6063d8ab242a] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2076.374219] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.140s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.374467] env[62619]: INFO nova.compute.manager [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Migrating [ 2076.425736] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: a2064f8f-b928-44c0-86d8-c0bb9882dbde] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2076.888886] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2076.889107] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2076.889232] env[62619]: DEBUG nova.network.neutron [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2076.928210] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 8ed2e4af-b484-4cd5-89c0-6ba60188127a] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2077.431340] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: b56800e8-1eab-4589-8d09-961f73973981] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2077.613872] env[62619]: DEBUG nova.network.neutron [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance_info_cache with network_info: [{"id": "b1a6212d-63f4-4343-9100-d88707a89c10", "address": "fa:16:3e:48:b2:0f", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": 
[], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a6212d-63", "ovs_interfaceid": "b1a6212d-63f4-4343-9100-d88707a89c10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.867722] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "424da668-f458-44a6-9b38-e6c5db352a01" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2077.867976] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2077.935271] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: cec0ea75-042d-4ee5-91d5-cad86456ab97] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2078.116651] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2078.371816] env[62619]: DEBUG nova.compute.utils [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2078.439024] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 21d9fc7a-228e-4b33-8534-55285d4e6e96] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2078.874983] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01" "released" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.943567] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 13eeb4aa-0f20-4aed-9453-66afb0ff1152] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2079.447360] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: f4e85890-ca7d-45a7-92ff-ab881c21c7ed] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2079.631548] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145ebfb7-6102-43c2-b89d-ee38540058a0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.650163] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance '4c66bbdf-af6a-4705-8219-85cf19f8314e' progress to 0 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2079.950809] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "424da668-f458-44a6-9b38-e6c5db352a01" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2079.951026] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2079.951313] env[62619]: INFO nova.compute.manager [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Attaching volume 705d7cf5-1175-4950-8c59-bfa2ea994183 to /dev/sdb [ 2079.953164] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: a0953370-77f2-4e3b-a92e-cb12b3a82361] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2079.986930] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8abcae-968a-4ee8-be43-76c3a1013fde {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.994447] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d352b0-66da-408c-8037-3ca6343dfd71 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.008100] env[62619]: DEBUG nova.virt.block_device [None 
req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Updating existing volume attachment record: fe43dded-3fca-4b2f-8cc7-434746c4e9c0 {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2080.156072] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2080.156439] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d135ef20-fb5c-42a7-bcb0-bad347adef04 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.163015] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2080.163015] env[62619]: value = "task-1778760" [ 2080.163015] env[62619]: _type = "Task" [ 2080.163015] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.171181] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778760, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.457020] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e1cd6059-ddb0-4f10-a569-e0bc71a63f4b] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2080.672302] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778760, 'name': PowerOffVM_Task, 'duration_secs': 0.200386} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.672586] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2080.672769] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance '4c66bbdf-af6a-4705-8219-85cf19f8314e' progress to 17 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2080.961440] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 1d40d434-fa8f-463e-908a-24c61538fe33] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2081.179500] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2081.179810] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2081.179922] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2081.180119] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2081.180268] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2081.180413] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2081.180619] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2081.180824] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2081.181009] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2081.181206] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2081.181388] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2081.186622] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-159962a5-0e78-4a99-97a7-398aad96d7c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.203339] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2081.203339] env[62619]: value = "task-1778761" [ 2081.203339] env[62619]: _type = "Task" [ 2081.203339] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.211760] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778761, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.464692] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 70265068-1185-4f23-b0b4-ed2378c17a89] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2081.715356] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778761, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.967764] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 54da64a0-4acf-4025-9b51-7af61dbd55fc] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2082.213474] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778761, 'name': ReconfigVM_Task, 'duration_secs': 0.542651} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.213824] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance '4c66bbdf-af6a-4705-8219-85cf19f8314e' progress to 33 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2082.471368] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 8616e7b2-f1d3-4ba6-9c9d-a4422da7be8e] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2082.720473] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2082.720731] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2082.720888] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2082.721105] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2082.721263] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 2082.721438] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2082.721616] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2082.721776] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2082.721939] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2082.722264] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2082.722461] env[62619]: DEBUG nova.virt.hardware [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2082.727751] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Reconfiguring VM instance instance-00000069 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2082.728050] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81dd1093-a72a-4e72-a490-a1df509446cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.748309] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2082.748309] env[62619]: value = "task-1778763" [ 2082.748309] env[62619]: _type = "Task" [ 2082.748309] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.756091] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778763, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.975069] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: d3aa352b-7d2d-416e-a579-9636619bb025] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2083.258152] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778763, 'name': ReconfigVM_Task, 'duration_secs': 0.265828} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.258445] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Reconfigured VM instance instance-00000069 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2083.259260] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3169023-1e79-43bb-ba41-9c4fc343581a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.280729] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 4c66bbdf-af6a-4705-8219-85cf19f8314e/4c66bbdf-af6a-4705-8219-85cf19f8314e.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2083.280729] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48614c36-ff96-4d4e-b6c3-67ade76ae240 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.298522] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2083.298522] env[62619]: value = "task-1778764" [ 2083.298522] env[62619]: _type = "Task" [ 2083.298522] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.308850] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778764, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.478663] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: a250f05d-cd74-436d-b656-2a9e55527809] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2083.807780] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778764, 'name': ReconfigVM_Task, 'duration_secs': 0.271219} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.808065] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 4c66bbdf-af6a-4705-8219-85cf19f8314e/4c66bbdf-af6a-4705-8219-85cf19f8314e.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2083.808322] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance '4c66bbdf-af6a-4705-8219-85cf19f8314e' progress to 50 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2083.981637] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: cee0356c-25d7-48ca-be09-16b0e1b56a41] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2084.316479] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f38b43e-d3de-45ad-be66-23760ab560c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.335079] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a89834-9237-4016-ae0e-0697b6dce7f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.352311] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance '4c66bbdf-af6a-4705-8219-85cf19f8314e' progress to 67 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2084.485394] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 0987b6ec-2587-4f15-adbb-f563e19ecce9] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2084.554485] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Volume attach. 
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2084.554731] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369206', 'volume_id': '705d7cf5-1175-4950-8c59-bfa2ea994183', 'name': 'volume-705d7cf5-1175-4950-8c59-bfa2ea994183', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '424da668-f458-44a6-9b38-e6c5db352a01', 'attached_at': '', 'detached_at': '', 'volume_id': '705d7cf5-1175-4950-8c59-bfa2ea994183', 'serial': '705d7cf5-1175-4950-8c59-bfa2ea994183'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2084.555605] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13ad0b1-d9fa-4dec-aabf-c61f3d588785 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.572544] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ca2e08-422b-4073-bd41-b7a6cca4f8a6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.595736] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] volume-705d7cf5-1175-4950-8c59-bfa2ea994183/volume-705d7cf5-1175-4950-8c59-bfa2ea994183.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2084.595998] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b96d645-2869-40a3-a6cf-67f386f51ed9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.613288] env[62619]: DEBUG oslo_vmware.api [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2084.613288] env[62619]: value = "task-1778765" [ 2084.613288] env[62619]: _type = "Task" [ 2084.613288] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.622036] env[62619]: DEBUG oslo_vmware.api [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778765, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.891920] env[62619]: DEBUG nova.network.neutron [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Port b1a6212d-63f4-4343-9100-d88707a89c10 binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2084.988785] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 52b29fef-eab6-4541-a570-af9c0c021a75] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2085.124657] env[62619]: DEBUG oslo_vmware.api [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778765, 'name': ReconfigVM_Task, 'duration_secs': 0.318962} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.124903] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Reconfigured VM instance instance-00000077 to attach disk [datastore1] volume-705d7cf5-1175-4950-8c59-bfa2ea994183/volume-705d7cf5-1175-4950-8c59-bfa2ea994183.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2085.129358] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d17e3f41-c8a4-4a9a-b1d2-3d354c57429e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.143846] env[62619]: DEBUG oslo_vmware.api [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2085.143846] env[62619]: value = "task-1778766" [ 2085.143846] env[62619]: _type = "Task" [ 2085.143846] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.151182] env[62619]: DEBUG oslo_vmware.api [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778766, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.492444] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 848da7a1-9cec-4715-bbe1-ef7a51b3a5c4] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2085.654093] env[62619]: DEBUG oslo_vmware.api [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778766, 'name': ReconfigVM_Task, 'duration_secs': 0.145221} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.654432] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369206', 'volume_id': '705d7cf5-1175-4950-8c59-bfa2ea994183', 'name': 'volume-705d7cf5-1175-4950-8c59-bfa2ea994183', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '424da668-f458-44a6-9b38-e6c5db352a01', 'attached_at': '', 'detached_at': '', 'volume_id': '705d7cf5-1175-4950-8c59-bfa2ea994183', 'serial': '705d7cf5-1175-4950-8c59-bfa2ea994183'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2085.909104] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "4c66bbdf-af6a-4705-8219-85cf19f8314e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.909339] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.909517] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2085.996283] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e19650da-cc3d-4350-be3e-dc776ce68206] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2086.499646] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: f3345332-5a22-4a1c-ac74-4e8f2ceb3f15] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2086.689857] env[62619]: DEBUG nova.objects.instance [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lazy-loading 'flavor' on Instance uuid 424da668-f458-44a6-9b38-e6c5db352a01 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2086.949930] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2086.950140] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2086.950311] env[62619]: DEBUG nova.network.neutron [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2087.002923] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 32aed8cd-1583-4253-bfb6-a98610e2f32e] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2087.194846] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a948553d-0537-42e7-9641-4969e7f303b0 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.244s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.506380] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e302e431-1f95-4ab5-bfca-59450fd887f0] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2087.663882] env[62619]: DEBUG nova.network.neutron [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance_info_cache with network_info: [{"id": "b1a6212d-63f4-4343-9100-d88707a89c10", "address": "fa:16:3e:48:b2:0f", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a6212d-63", "ovs_interfaceid": "b1a6212d-63f4-4343-9100-d88707a89c10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.009286] env[62619]: DEBUG nova.compute.manager 
[None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 8745aa7f-9848-4320-94b5-08b7e3bccf80] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2088.030760] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "424da668-f458-44a6-9b38-e6c5db352a01" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2088.030981] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2088.166013] env[62619]: DEBUG oslo_concurrency.lockutils [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2088.512476] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 7cb51b51-514d-4223-a82a-5cdbdab9482a] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2088.534961] env[62619]: DEBUG nova.compute.utils [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2088.690025] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964e9134-7e11-4c3c-9869-fe32e7d19b45 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.708679] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024e9763-3e0a-4945-8e14-11ef39ad8853 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.715301] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance '4c66bbdf-af6a-4705-8219-85cf19f8314e' progress to 83 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2089.016268] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 917960ca-3870-4e4e-aafe-3c6d77cf7c51] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2089.038161] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock 
"424da668-f458-44a6-9b38-e6c5db352a01" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.220930] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2089.221242] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ca2b6c6-529b-4191-8fc1-f17158a50fc0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.228469] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2089.228469] env[62619]: value = "task-1778767" [ 2089.228469] env[62619]: _type = "Task" [ 2089.228469] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2089.235927] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778767, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.519688] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: f1f14ada-dcf4-45f9-83ea-6ac2a4a93c8e] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2089.739196] env[62619]: DEBUG oslo_vmware.api [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778767, 'name': PowerOnVM_Task, 'duration_secs': 0.375803} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2089.739543] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2089.739758] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-92866291-fe9e-4fae-8ae1-74c71fb90bf4 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance '4c66bbdf-af6a-4705-8219-85cf19f8314e' progress to 100 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2090.023397] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 4858096a-9683-4a7c-bbeb-4e6b2f5401cf] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2090.102839] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "424da668-f458-44a6-9b38-e6c5db352a01" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2090.103167] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2090.103455] env[62619]: INFO nova.compute.manager [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Attaching volume bea386e7-01b5-4edc-8350-b323672dda39 to /dev/sdc [ 2090.136261] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca53caa5-b93a-4a17-937d-9b4d64afb07d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.143545] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69be6651-2443-4262-b9c2-f96d39e9ffe3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.157450] env[62619]: DEBUG nova.virt.block_device [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Updating existing volume attachment record: 2766c047-b130-4404-bc99-c7dab46b3f64 {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2090.527570] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 4983b333-debb-4a2b-b28d-b321f0d8d7d7] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11622}} [ 2090.892139] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "7b29140c-320e-4497-b724-2587e2ff5793" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2090.892396] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "7b29140c-320e-4497-b724-2587e2ff5793" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2090.892579] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "7b29140c-320e-4497-b724-2587e2ff5793-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2090.892759] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "7b29140c-320e-4497-b724-2587e2ff5793-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2090.892925] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "7b29140c-320e-4497-b724-2587e2ff5793-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2090.895096] env[62619]: INFO nova.compute.manager [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Terminating instance [ 2091.031905] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 11869077-b428-413f-9f8f-7eac08d2d9ec] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2091.398780] env[62619]: DEBUG nova.compute.manager [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2091.398780] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2091.399612] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2600ed1-8634-404d-914c-152885c10809 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.407777] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2091.408016] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-534869e7-78d4-422f-9152-462038cd5023 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.413563] env[62619]: DEBUG oslo_vmware.api [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2091.413563] env[62619]: value = "task-1778769" [ 2091.413563] env[62619]: _type = "Task" [ 2091.413563] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.421686] env[62619]: DEBUG oslo_vmware.api [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778769, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.535552] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 5cf7ca57-351f-48ab-8758-b30f50cd607f] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2091.825587] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "4c66bbdf-af6a-4705-8219-85cf19f8314e" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2091.825847] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2091.825982] env[62619]: DEBUG nova.compute.manager [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Going to confirm migration 9 {{(pid=62619) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5157}} [ 2091.923242] env[62619]: DEBUG oslo_vmware.api [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778769, 'name': PowerOffVM_Task, 'duration_secs': 0.270146} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.923504] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2091.923672] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2091.923919] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bc89de16-23cd-449b-b21a-6c198f3862c0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.991092] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2091.991403] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2091.991552] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Deleting the datastore file [datastore1] 7b29140c-320e-4497-b724-2587e2ff5793 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2091.991806] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d906e7d2-0fa8-469f-aaa8-10ee3f816f62 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.998298] env[62619]: DEBUG oslo_vmware.api [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2091.998298] env[62619]: value = "task-1778771" [ 2091.998298] env[62619]: _type = "Task" [ 2091.998298] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.005603] env[62619]: DEBUG oslo_vmware.api [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778771, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.039256] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 4cd6dafd-4f19-4d0f-8e07-8171a6a71e85] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2092.394924] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2092.395160] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2092.395355] env[62619]: DEBUG nova.network.neutron [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2092.395552] env[62619]: DEBUG nova.objects.instance [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lazy-loading 'info_cache' on Instance uuid 4c66bbdf-af6a-4705-8219-85cf19f8314e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2092.508808] env[62619]: DEBUG oslo_vmware.api [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778771, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201456} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.509084] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2092.509270] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2092.509441] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2092.509610] env[62619]: INFO nova.compute.manager [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Took 1.11 seconds to destroy the instance on the hypervisor. [ 2092.509848] env[62619]: DEBUG oslo.service.loopingcall [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2092.510126] env[62619]: DEBUG nova.compute.manager [-] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2092.510238] env[62619]: DEBUG nova.network.neutron [-] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2092.542928] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e32cb991-a018-4b55-8cdf-378e212c8434] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2092.954124] env[62619]: DEBUG nova.compute.manager [req-b1d7600f-19b1-4170-a066-9a33eda88d03 req-2c8251ab-edc1-4310-b076-2f4b343a4093 service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Received event network-vif-deleted-34b38d14-c4de-4ea8-ade4-66790a305b89 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2092.954230] env[62619]: INFO nova.compute.manager [req-b1d7600f-19b1-4170-a066-9a33eda88d03 req-2c8251ab-edc1-4310-b076-2f4b343a4093 service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Neutron deleted interface 34b38d14-c4de-4ea8-ade4-66790a305b89; detaching it from the instance and deleting it from the info cache [ 2092.954345] env[62619]: DEBUG nova.network.neutron [req-b1d7600f-19b1-4170-a066-9a33eda88d03 req-2c8251ab-edc1-4310-b076-2f4b343a4093 service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2093.046282] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 006c9f0b-4b53-4740-9f67-ec9b19b8bcb2] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2093.433055] env[62619]: DEBUG nova.network.neutron [-] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2093.457059] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a8111fe-0080-49ed-a0f7-8e28fbd4b5ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.471424] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7a7657-4747-4ed6-bc5e-11fc9132679f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.498258] env[62619]: DEBUG nova.compute.manager [req-b1d7600f-19b1-4170-a066-9a33eda88d03 req-2c8251ab-edc1-4310-b076-2f4b343a4093 service nova] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Detach interface failed, port_id=34b38d14-c4de-4ea8-ade4-66790a305b89, reason: Instance 7b29140c-320e-4497-b724-2587e2ff5793 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2093.551181] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2093.620140] env[62619]: DEBUG nova.network.neutron [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance_info_cache with network_info: [{"id": "b1a6212d-63f4-4343-9100-d88707a89c10", "address": "fa:16:3e:48:b2:0f", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a6212d-63", "ovs_interfaceid": "b1a6212d-63f4-4343-9100-d88707a89c10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2093.935456] env[62619]: INFO nova.compute.manager [-] [instance: 7b29140c-320e-4497-b724-2587e2ff5793] Took 1.42 seconds to deallocate network for instance. 
[ 2094.123356] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "refresh_cache-4c66bbdf-af6a-4705-8219-85cf19f8314e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2094.123636] env[62619]: DEBUG nova.objects.instance [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lazy-loading 'migration_context' on Instance uuid 4c66bbdf-af6a-4705-8219-85cf19f8314e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2094.441923] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.442217] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.442463] env[62619]: DEBUG nova.objects.instance [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lazy-loading 'resources' on Instance uuid 7b29140c-320e-4497-b724-2587e2ff5793 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2094.626721] env[62619]: DEBUG nova.objects.base [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Object Instance<4c66bbdf-af6a-4705-8219-85cf19f8314e> lazy-loaded attributes: info_cache,migration_context {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2094.627707] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581b4e7f-9064-443c-a34e-8f1c33a08967 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.647446] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02387450-9059-4757-b6e0-5c2fa378acf2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.652837] env[62619]: DEBUG oslo_vmware.api [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2094.652837] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52de7649-dbdc-de01-ca73-6bb4fdbe9106" [ 2094.652837] env[62619]: _type = "Task" [ 2094.652837] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.661713] env[62619]: DEBUG oslo_vmware.api [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52de7649-dbdc-de01-ca73-6bb4fdbe9106, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.699086] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Volume attach. Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2094.699356] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369207', 'volume_id': 'bea386e7-01b5-4edc-8350-b323672dda39', 'name': 'volume-bea386e7-01b5-4edc-8350-b323672dda39', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '424da668-f458-44a6-9b38-e6c5db352a01', 'attached_at': '', 'detached_at': '', 'volume_id': 'bea386e7-01b5-4edc-8350-b323672dda39', 'serial': 'bea386e7-01b5-4edc-8350-b323672dda39'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2094.700180] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3044399-0abc-4f58-af10-6750ba29df73 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.716059] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b205d5-8eb2-4353-8512-c13c32a87bf6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.742647] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] volume-bea386e7-01b5-4edc-8350-b323672dda39/volume-bea386e7-01b5-4edc-8350-b323672dda39.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2094.742885] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4591e38c-7e8b-4c97-bd9e-79570bda3629 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.760163] env[62619]: DEBUG oslo_vmware.api [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2094.760163] env[62619]: value = "task-1778773" [ 2094.760163] env[62619]: _type = "Task" [ 2094.760163] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.767588] env[62619]: DEBUG oslo_vmware.api [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778773, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.016009] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090205f8-0f46-43b1-a1bb-b4cfc075513e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.023917] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9eaf484-73cc-4d5e-9a01-ccdf54e773f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.054099] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2095.055070] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73beae41-bc63-4924-a80c-31376b00a5b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.061778] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98ae3e9-6d34-4ac2-a0ce-ca5086ec3fd3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.075756] env[62619]: DEBUG nova.compute.provider_tree [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2095.162145] env[62619]: DEBUG oslo_vmware.api [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52de7649-dbdc-de01-ca73-6bb4fdbe9106, 'name': SearchDatastore_Task, 'duration_secs': 0.010715} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.162447] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2095.269584] env[62619]: DEBUG oslo_vmware.api [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778773, 'name': ReconfigVM_Task, 'duration_secs': 0.368436} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.269836] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Reconfigured VM instance instance-00000077 to attach disk [datastore1] volume-bea386e7-01b5-4edc-8350-b323672dda39/volume-bea386e7-01b5-4edc-8350-b323672dda39.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2095.274369] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f973dbc-fa14-4152-858c-f0dc73df3c81 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.288843] env[62619]: DEBUG oslo_vmware.api [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2095.288843] env[62619]: value = "task-1778774" [ 2095.288843] env[62619]: _type = "Task" [ 2095.288843] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.296146] env[62619]: DEBUG oslo_vmware.api [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778774, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.579999] env[62619]: DEBUG nova.scheduler.client.report [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2095.799119] env[62619]: DEBUG oslo_vmware.api [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778774, 'name': ReconfigVM_Task, 'duration_secs': 0.126037} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.799413] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369207', 'volume_id': 'bea386e7-01b5-4edc-8350-b323672dda39', 'name': 'volume-bea386e7-01b5-4edc-8350-b323672dda39', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '424da668-f458-44a6-9b38-e6c5db352a01', 'attached_at': '', 'detached_at': '', 'volume_id': 'bea386e7-01b5-4edc-8350-b323672dda39', 'serial': 'bea386e7-01b5-4edc-8350-b323672dda39'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2096.083935] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.642s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2096.086086] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.924s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2096.103187] env[62619]: INFO nova.scheduler.client.report [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Deleted allocations for instance 7b29140c-320e-4497-b724-2587e2ff5793 [ 2096.612049] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b8ec84ca-2a8c-475d-81ea-b2530cfc36fa tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "7b29140c-320e-4497-b724-2587e2ff5793" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.719s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2096.654261] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca5b692-6d8f-481a-84e6-ceb8fc3760f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.662159] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa46545-0851-426b-ab55-e84add2c67e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.693189] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4c0c69-8416-4b5b-8c37-db0fdb575e59 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.700880] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda09dec-e299-4d56-bc93-df6df3475007 
{{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.714346] env[62619]: DEBUG nova.compute.provider_tree [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2096.837837] env[62619]: DEBUG nova.objects.instance [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lazy-loading 'flavor' on Instance uuid 424da668-f458-44a6-9b38-e6c5db352a01 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2097.217081] env[62619]: DEBUG nova.scheduler.client.report [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2097.342499] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f3ebd884-66c2-4f9f-b897-187a929d7b75 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.239s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.650250] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "424da668-f458-44a6-9b38-e6c5db352a01" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2097.650522] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2097.863116] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2097.863495] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 
tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.153878] env[62619]: INFO nova.compute.manager [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Detaching volume 705d7cf5-1175-4950-8c59-bfa2ea994183 [ 2098.190630] env[62619]: INFO nova.virt.block_device [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Attempting to driver detach volume 705d7cf5-1175-4950-8c59-bfa2ea994183 from mountpoint /dev/sdb [ 2098.190731] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Volume detach. Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2098.190874] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369206', 'volume_id': '705d7cf5-1175-4950-8c59-bfa2ea994183', 'name': 'volume-705d7cf5-1175-4950-8c59-bfa2ea994183', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '424da668-f458-44a6-9b38-e6c5db352a01', 'attached_at': '', 'detached_at': '', 'volume_id': '705d7cf5-1175-4950-8c59-bfa2ea994183', 'serial': '705d7cf5-1175-4950-8c59-bfa2ea994183'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2098.191812] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a129388-035b-498d-84c9-fdd5d29807b0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.215542] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b095acbf-1d17-4710-94fa-fbf4a06f9032 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.222287] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ca62c6-1640-46e6-adc9-da4123e9cbc7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.226816] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.141s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2098.247250] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c8cb05e8-d1e2-4c26-ac23-567346e10990 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.261805] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] The volume has not been displaced from its original location: [datastore1] volume-705d7cf5-1175-4950-8c59-bfa2ea994183/volume-705d7cf5-1175-4950-8c59-bfa2ea994183.vmdk. No consolidation needed. {{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2098.266919] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Reconfiguring VM instance instance-00000077 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2098.267796] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aaa32a64-04c8-4796-b176-0aff179a86fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.285130] env[62619]: DEBUG oslo_vmware.api [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2098.285130] env[62619]: value = "task-1778775" [ 2098.285130] env[62619]: _type = "Task" [ 2098.285130] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.293033] env[62619]: DEBUG oslo_vmware.api [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778775, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.366252] env[62619]: INFO nova.compute.manager [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Detaching volume b1cdf37a-a1a8-498a-a080-5172a5357cfd [ 2098.397162] env[62619]: INFO nova.virt.block_device [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Attempting to driver detach volume b1cdf37a-a1a8-498a-a080-5172a5357cfd from mountpoint /dev/sdb [ 2098.397543] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Volume detach. 
Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2098.397823] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369202', 'volume_id': 'b1cdf37a-a1a8-498a-a080-5172a5357cfd', 'name': 'volume-b1cdf37a-a1a8-498a-a080-5172a5357cfd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e599f5ad-1b4d-4912-9b96-2544c52b0acf', 'attached_at': '', 'detached_at': '', 'volume_id': 'b1cdf37a-a1a8-498a-a080-5172a5357cfd', 'serial': 'b1cdf37a-a1a8-498a-a080-5172a5357cfd'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2098.398903] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51197bd6-c3cc-45cb-a809-0deecc25cd05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.425466] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a2e152-bca6-4013-8df4-e80584e2c688 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.433099] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679ebf0f-bdd2-4b49-83f4-9ed0a8c630c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.453184] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120e93dd-869c-4452-b2d4-8c786ec408bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.467637] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] The volume has not been displaced from its original location: [datastore1] volume-b1cdf37a-a1a8-498a-a080-5172a5357cfd/volume-b1cdf37a-a1a8-498a-a080-5172a5357cfd.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2098.472847] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Reconfiguring VM instance instance-00000075 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2098.473133] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-719a292a-13f4-4f30-9769-32138fa0a2d0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.489745] env[62619]: DEBUG oslo_vmware.api [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2098.489745] env[62619]: value = "task-1778776" [ 2098.489745] env[62619]: _type = "Task" [ 2098.489745] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.497519] env[62619]: DEBUG oslo_vmware.api [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778776, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.795229] env[62619]: DEBUG oslo_vmware.api [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778775, 'name': ReconfigVM_Task, 'duration_secs': 0.217594} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.795586] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Reconfigured VM instance instance-00000077 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2098.800836] env[62619]: INFO nova.scheduler.client.report [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Deleted allocation for migration 5c54decc-fb88-4aa1-85fc-6e1ff999b2e9 [ 2098.801831] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e35c987f-9f13-4686-902f-160e0453f930 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.821067] env[62619]: DEBUG oslo_vmware.api [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2098.821067] env[62619]: value = "task-1778777" [ 2098.821067] env[62619]: _type = "Task" [ 2098.821067] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.828750] env[62619]: DEBUG oslo_vmware.api [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778777, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.999798] env[62619]: DEBUG oslo_vmware.api [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778776, 'name': ReconfigVM_Task, 'duration_secs': 0.247402} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.000160] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Reconfigured VM instance instance-00000075 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2099.004673] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fc276ea-df34-473a-8cda-cac21fe47ad0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.018982] env[62619]: DEBUG oslo_vmware.api [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2099.018982] env[62619]: value = "task-1778778" [ 2099.018982] env[62619]: _type = "Task" [ 2099.018982] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.029402] env[62619]: DEBUG oslo_vmware.api [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778778, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.317466] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d57e9390-a9be-4cbc-b440-465bda194d72 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.492s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.330848] env[62619]: DEBUG oslo_vmware.api [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778777, 'name': ReconfigVM_Task, 'duration_secs': 0.132144} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.331194] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369206', 'volume_id': '705d7cf5-1175-4950-8c59-bfa2ea994183', 'name': 'volume-705d7cf5-1175-4950-8c59-bfa2ea994183', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '424da668-f458-44a6-9b38-e6c5db352a01', 'attached_at': '', 'detached_at': '', 'volume_id': '705d7cf5-1175-4950-8c59-bfa2ea994183', 'serial': '705d7cf5-1175-4950-8c59-bfa2ea994183'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2099.531103] env[62619]: DEBUG oslo_vmware.api [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778778, 'name': ReconfigVM_Task, 'duration_secs': 0.204736} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.531103] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369202', 'volume_id': 'b1cdf37a-a1a8-498a-a080-5172a5357cfd', 'name': 'volume-b1cdf37a-a1a8-498a-a080-5172a5357cfd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e599f5ad-1b4d-4912-9b96-2544c52b0acf', 'attached_at': '', 'detached_at': '', 'volume_id': 'b1cdf37a-a1a8-498a-a080-5172a5357cfd', 'serial': 'b1cdf37a-a1a8-498a-a080-5172a5357cfd'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2099.871861] env[62619]: DEBUG nova.objects.instance [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lazy-loading 'flavor' on Instance uuid 424da668-f458-44a6-9b38-e6c5db352a01 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2099.914943] env[62619]: DEBUG oslo_concurrency.lockutils [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "4c66bbdf-af6a-4705-8219-85cf19f8314e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2099.915288] env[62619]: DEBUG oslo_concurrency.lockutils [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2099.915568] env[62619]: DEBUG oslo_concurrency.lockutils [None 
req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "4c66bbdf-af6a-4705-8219-85cf19f8314e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2099.915813] env[62619]: DEBUG oslo_concurrency.lockutils [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2099.916047] env[62619]: DEBUG oslo_concurrency.lockutils [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.918420] env[62619]: INFO nova.compute.manager [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Terminating instance [ 2100.071334] env[62619]: DEBUG nova.objects.instance [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lazy-loading 'flavor' on Instance uuid e599f5ad-1b4d-4912-9b96-2544c52b0acf {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2100.423141] env[62619]: DEBUG nova.compute.manager [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2100.423380] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2100.424275] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4841cc-b516-4feb-9b2f-854aad1c6b27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.432152] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2100.432374] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a62dc83a-c683-421c-9cd0-e160fa78ae69 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.438414] env[62619]: DEBUG oslo_vmware.api [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2100.438414] env[62619]: value = "task-1778779" [ 2100.438414] env[62619]: _type = "Task" [ 2100.438414] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2100.445943] env[62619]: DEBUG oslo_vmware.api [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778779, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.880213] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5daa976-30e9-4dea-949a-42e251b7b7d5 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.229s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2100.887330] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "424da668-f458-44a6-9b38-e6c5db352a01" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2100.887537] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2100.948077] env[62619]: DEBUG oslo_vmware.api [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778779, 'name': PowerOffVM_Task, 'duration_secs': 0.491755} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.948364] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2100.948531] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2100.948776] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26a46537-78bc-4e59-bdc4-b67d7ac225b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.032613] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2101.032861] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
2101.033016] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Deleting the datastore file [datastore1] 4c66bbdf-af6a-4705-8219-85cf19f8314e {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2101.033284] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81d6008c-927a-4fd6-95a6-559519ad76dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.038871] env[62619]: DEBUG oslo_vmware.api [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2101.038871] env[62619]: value = "task-1778781" [ 2101.038871] env[62619]: _type = "Task" [ 2101.038871] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2101.046726] env[62619]: DEBUG oslo_vmware.api [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778781, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.077307] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9e1c530c-65c1-4bf3-a7fc-d2b95ce833c5 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.214s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2101.389897] env[62619]: INFO nova.compute.manager [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Detaching volume bea386e7-01b5-4edc-8350-b323672dda39 [ 2101.419488] env[62619]: INFO nova.virt.block_device [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Attempting to driver detach volume bea386e7-01b5-4edc-8350-b323672dda39 from mountpoint /dev/sdc [ 2101.419848] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Volume detach. 
Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2101.420062] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369207', 'volume_id': 'bea386e7-01b5-4edc-8350-b323672dda39', 'name': 'volume-bea386e7-01b5-4edc-8350-b323672dda39', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '424da668-f458-44a6-9b38-e6c5db352a01', 'attached_at': '', 'detached_at': '', 'volume_id': 'bea386e7-01b5-4edc-8350-b323672dda39', 'serial': 'bea386e7-01b5-4edc-8350-b323672dda39'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2101.420926] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4313445f-7b2c-41f5-80ad-249cef58a9ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.443338] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a864919-fc41-43d1-9485-4d29185d9190 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.450376] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6ac0e5-c29e-4ce7-85be-9bcf5bcb19f8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.470069] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d2a452a-bcef-4a26-8452-7421910e8e27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.485192] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] The volume has not been displaced from its original location: [datastore1] volume-bea386e7-01b5-4edc-8350-b323672dda39/volume-bea386e7-01b5-4edc-8350-b323672dda39.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2101.491154] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Reconfiguring VM instance instance-00000077 to detach disk 2002 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2101.491470] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-165a0854-4a89-41c0-a9ea-3933d0398ccb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.509103] env[62619]: DEBUG oslo_vmware.api [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2101.509103] env[62619]: value = "task-1778782" [ 2101.509103] env[62619]: _type = "Task" [ 2101.509103] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2101.519937] env[62619]: DEBUG oslo_vmware.api [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778782, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.548618] env[62619]: DEBUG oslo_vmware.api [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778781, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142529} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2101.548912] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2101.549131] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2101.549378] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2101.549589] env[62619]: INFO nova.compute.manager [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Took 1.13 seconds to destroy the instance on the hypervisor. 
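The entries above trace the VMware driver tearing down instance 4c66bbdf-af6a-4705-8219-85cf19f8314e: PowerOffVM_Task, UnregisterVM, then DeleteDatastoreFile_Task on the instance's datastore directory, each task polled until it reports completion. The following is a minimal sketch of that sequence against the oslo.vmware session API, not Nova's actual vmops/ds_util code; the function name, the pre-resolved vm_ref/dc_ref/ds_path arguments, and the error handling are illustrative assumptions.

# Minimal sketch (not Nova's vmops implementation) of the destroy sequence the
# log shows: power off -> unregister -> delete the datastore directory.
# Assumes an existing oslo_vmware VMwareAPISession (`session`), a managed-object
# reference for the VM (`vm_ref`), its datacenter (`dc_ref`), and its datastore
# path (`ds_path`, e.g. "[datastore1] <instance-uuid>"). Helper names are
# illustrative only.
from oslo_vmware import exceptions as vexc


def destroy_vm(session, vm_ref, dc_ref, ds_path):
    vim = session.vim

    # PowerOffVM_Task returns a task moref; wait_for_task() polls it, which is
    # what produces the "progress is 0% ... completed successfully" lines above.
    try:
        task = session.invoke_api(vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
    except vexc.VimFaultException:
        # The VM may already be powered off; tolerate that here.
        pass

    # UnregisterVM is a plain method, not a task, so there is nothing to poll.
    session.invoke_api(vim, 'UnregisterVM', vm_ref)

    # Finally remove the instance directory from the datastore, mirroring the
    # DeleteDatastoreFile_Task entries in the log.
    file_manager = vim.service_content.fileManager
    task = session.invoke_api(vim, 'DeleteDatastoreFile_Task', file_manager,
                              name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)

After the datastore delete completes, the compute manager deallocates the instance's Neutron ports and drops its placement allocations, which is the "Deallocating network for instance" / "Deleted allocations" activity interleaved above.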
[ 2101.549895] env[62619]: DEBUG oslo.service.loopingcall [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2101.550151] env[62619]: DEBUG nova.compute.manager [-] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2101.550278] env[62619]: DEBUG nova.network.neutron [-] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2102.019298] env[62619]: DEBUG oslo_vmware.api [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778782, 'name': ReconfigVM_Task, 'duration_secs': 0.238248} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.019625] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Reconfigured VM instance instance-00000077 to detach disk 2002 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2102.024302] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7492440-2179-44d0-bfe3-73f8694ad693 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.039520] env[62619]: DEBUG oslo_vmware.api [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2102.039520] env[62619]: value = "task-1778783" [ 2102.039520] env[62619]: _type = "Task" [ 2102.039520] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2102.047846] env[62619]: DEBUG oslo_vmware.api [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778783, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.109593] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2102.109937] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2102.110085] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2102.110249] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2102.110425] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2102.112624] env[62619]: INFO nova.compute.manager [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Terminating instance [ 2102.210290] env[62619]: DEBUG nova.compute.manager [req-105decc5-5ccf-4cdd-b54d-bf59ef336ff8 req-c58aacae-d8a6-42e3-b28e-f9ec36a556b3 service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Received event network-vif-deleted-b1a6212d-63f4-4343-9100-d88707a89c10 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2102.210397] env[62619]: INFO nova.compute.manager [req-105decc5-5ccf-4cdd-b54d-bf59ef336ff8 req-c58aacae-d8a6-42e3-b28e-f9ec36a556b3 service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Neutron deleted interface b1a6212d-63f4-4343-9100-d88707a89c10; detaching it from the instance and deleting it from the info cache [ 2102.210543] env[62619]: DEBUG nova.network.neutron [req-105decc5-5ccf-4cdd-b54d-bf59ef336ff8 req-c58aacae-d8a6-42e3-b28e-f9ec36a556b3 service nova] 
[instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2102.548786] env[62619]: DEBUG oslo_vmware.api [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778783, 'name': ReconfigVM_Task, 'duration_secs': 0.182771} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.549087] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369207', 'volume_id': 'bea386e7-01b5-4edc-8350-b323672dda39', 'name': 'volume-bea386e7-01b5-4edc-8350-b323672dda39', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '424da668-f458-44a6-9b38-e6c5db352a01', 'attached_at': '', 'detached_at': '', 'volume_id': 'bea386e7-01b5-4edc-8350-b323672dda39', 'serial': 'bea386e7-01b5-4edc-8350-b323672dda39'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2102.616141] env[62619]: DEBUG nova.compute.manager [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2102.616410] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2102.617298] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c2d43d-3fc0-485b-8e78-de8ac567dbc8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.625172] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2102.625458] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4bfa1e1b-91af-4519-9927-c154115bd5b3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.631926] env[62619]: DEBUG oslo_vmware.api [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2102.631926] env[62619]: value = "task-1778784" [ 2102.631926] env[62619]: _type = "Task" [ 2102.631926] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2102.641941] env[62619]: DEBUG oslo_vmware.api [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778784, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.683450] env[62619]: DEBUG nova.network.neutron [-] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2102.712876] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-152ac9d8-1aae-41e5-9bd2-8b641f3a581f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.722731] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9e3f18-8b6a-4728-b570-3262bc80be18 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.748598] env[62619]: DEBUG nova.compute.manager [req-105decc5-5ccf-4cdd-b54d-bf59ef336ff8 req-c58aacae-d8a6-42e3-b28e-f9ec36a556b3 service nova] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Detach interface failed, port_id=b1a6212d-63f4-4343-9100-d88707a89c10, reason: Instance 4c66bbdf-af6a-4705-8219-85cf19f8314e could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2103.089915] env[62619]: DEBUG nova.objects.instance [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lazy-loading 'flavor' on Instance uuid 424da668-f458-44a6-9b38-e6c5db352a01 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2103.141950] env[62619]: DEBUG oslo_vmware.api [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778784, 'name': PowerOffVM_Task, 'duration_secs': 0.232736} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2103.142317] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2103.142375] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2103.142608] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14aa9d26-5ab5-415b-8e4e-5bbe39eef870 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.186223] env[62619]: INFO nova.compute.manager [-] [instance: 4c66bbdf-af6a-4705-8219-85cf19f8314e] Took 1.64 seconds to deallocate network for instance. [ 2103.240947] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2103.241163] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2103.241371] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Deleting the datastore file [datastore1] e599f5ad-1b4d-4912-9b96-2544c52b0acf {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2103.241619] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3ac6971-e7e1-4810-8d0f-a02677bb710b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.248693] env[62619]: DEBUG oslo_vmware.api [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2103.248693] env[62619]: value = "task-1778786" [ 2103.248693] env[62619]: _type = "Task" [ 2103.248693] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2103.256294] env[62619]: DEBUG oslo_vmware.api [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778786, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.692751] env[62619]: DEBUG oslo_concurrency.lockutils [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2103.693032] env[62619]: DEBUG oslo_concurrency.lockutils [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.693167] env[62619]: DEBUG oslo_concurrency.lockutils [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.714807] env[62619]: INFO nova.scheduler.client.report [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Deleted allocations for instance 4c66bbdf-af6a-4705-8219-85cf19f8314e [ 2103.757882] env[62619]: DEBUG oslo_vmware.api [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778786, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137468} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2103.758698] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2103.758884] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2103.759075] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2103.759280] env[62619]: INFO nova.compute.manager [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Took 1.14 seconds to destroy the instance on the hypervisor. 
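The destroy sequence recorded above (ReconfigVM_Task to detach the VMDK, then PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, each followed by wait_for_task polling) is the usual oslo.vmware task pattern. The sketch below only illustrates that pattern and is not Nova's vmops/vm_util code: it assumes an already-established oslo_vmware.api.VMwareAPISession, and the function name plus the vm_ref, ds_path and dc_ref parameters are placeholders introduced here.

def destroy_vm(session, vm_ref, ds_path, dc_ref):
    """Power off, unregister and wipe a VM, mirroring the log sequence above.

    `session` is assumed to be an established oslo_vmware.api.VMwareAPISession;
    `vm_ref` a VirtualMachine managed-object ref, `ds_path` a '[datastore] dir'
    path and `dc_ref` the owning Datacenter ref (all placeholders).
    """
    # PowerOffVM_Task returns a task moref; wait_for_task() polls it
    # (the "progress is 0%" / "completed successfully" lines above)
    # and raises if the task ends in error.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is synchronous, so there is no task to poll.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Deleting the instance directory is again a task, issued via FileManager.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)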
[ 2103.759513] env[62619]: DEBUG oslo.service.loopingcall [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2103.759690] env[62619]: DEBUG nova.compute.manager [-] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2103.759782] env[62619]: DEBUG nova.network.neutron [-] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2104.097801] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f9e7db13-bcf3-4259-9704-5126323fbb39 tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.210s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.221588] env[62619]: DEBUG oslo_concurrency.lockutils [None req-66e711b9-19ca-4f2a-a731-a74366cf3ab0 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "4c66bbdf-af6a-4705-8219-85cf19f8314e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.306s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.240886] env[62619]: DEBUG nova.compute.manager [req-b97bcac6-775a-4340-bfbc-7bdce21122a9 req-1c347c41-e090-479b-8ddc-4ef0adc1c46c service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Received event network-vif-deleted-4e33b817-e043-4b75-92ae-40c8132fcc06 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2104.241158] env[62619]: INFO nova.compute.manager [req-b97bcac6-775a-4340-bfbc-7bdce21122a9 req-1c347c41-e090-479b-8ddc-4ef0adc1c46c service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Neutron deleted interface 4e33b817-e043-4b75-92ae-40c8132fcc06; detaching it from the instance and deleting it from the info cache [ 2104.241378] env[62619]: DEBUG nova.network.neutron [req-b97bcac6-775a-4340-bfbc-7bdce21122a9 req-1c347c41-e090-479b-8ddc-4ef0adc1c46c service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2104.716122] env[62619]: DEBUG nova.network.neutron [-] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2104.744323] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df18f3c8-5683-4267-835c-abe93640ad02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.757290] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac968061-2143-444f-ab7a-7857c5d18058 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.786364] env[62619]: DEBUG nova.compute.manager [req-b97bcac6-775a-4340-bfbc-7bdce21122a9 req-1c347c41-e090-479b-8ddc-4ef0adc1c46c service nova] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Detach interface failed, port_id=4e33b817-e043-4b75-92ae-40c8132fcc06, reason: Instance e599f5ad-1b4d-4912-9b96-2544c52b0acf could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2104.869384] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "1a2a66f3-1fe8-4b26-a3e2-8083815e0427" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.869501] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "1a2a66f3-1fe8-4b26-a3e2-8083815e0427" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2105.219430] env[62619]: INFO nova.compute.manager [-] [instance: e599f5ad-1b4d-4912-9b96-2544c52b0acf] Took 1.46 seconds to deallocate network for instance. [ 2105.273011] env[62619]: DEBUG oslo_concurrency.lockutils [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "424da668-f458-44a6-9b38-e6c5db352a01" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2105.273347] env[62619]: DEBUG oslo_concurrency.lockutils [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2105.273469] env[62619]: DEBUG oslo_concurrency.lockutils [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "424da668-f458-44a6-9b38-e6c5db352a01-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2105.273645] env[62619]: DEBUG oslo_concurrency.lockutils [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2105.273808] env[62619]: DEBUG oslo_concurrency.lockutils [None req-afd5c262-f676-421f-bf79-3c4cd167062d 
tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2105.275666] env[62619]: INFO nova.compute.manager [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Terminating instance [ 2105.371724] env[62619]: DEBUG nova.compute.manager [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2105.725472] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2105.725696] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2105.725910] env[62619]: DEBUG nova.objects.instance [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lazy-loading 'resources' on Instance uuid e599f5ad-1b4d-4912-9b96-2544c52b0acf {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2105.779111] env[62619]: DEBUG nova.compute.manager [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2105.779318] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2105.780141] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a7602f-07b6-49b9-b27c-a694554d551e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.788078] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2105.788296] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45004c8c-447b-42c3-be06-d3b8dad89d4a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.795150] env[62619]: DEBUG oslo_vmware.api [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2105.795150] env[62619]: value = "task-1778787" [ 2105.795150] env[62619]: _type = "Task" [ 2105.795150] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.803072] env[62619]: DEBUG oslo_vmware.api [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778787, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.893336] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2106.274490] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ae5dd5-69c0-4b22-a6b0-da773169bf45 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.282284] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5300ff56-e230-4492-a470-4634cedf5af4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.313404] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623c6eaa-6c45-4291-a493-60f34bb51a05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.320373] env[62619]: DEBUG oslo_vmware.api [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778787, 'name': PowerOffVM_Task, 'duration_secs': 0.173191} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2106.322298] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2106.322471] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2106.322719] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6875fc91-d281-4cfa-a414-3172cd8486e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.324831] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b35de6-418f-4691-8c36-b137ff1b7c8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.337812] env[62619]: DEBUG nova.compute.provider_tree [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2106.412165] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] 
[instance: 424da668-f458-44a6-9b38-e6c5db352a01] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2106.412433] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2106.412617] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Deleting the datastore file [datastore1] 424da668-f458-44a6-9b38-e6c5db352a01 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2106.412870] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a151b3e-b2e4-4217-a2c9-824401d6be5a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.420439] env[62619]: DEBUG oslo_vmware.api [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for the task: (returnval){ [ 2106.420439] env[62619]: value = "task-1778789" [ 2106.420439] env[62619]: _type = "Task" [ 2106.420439] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.429491] env[62619]: DEBUG oslo_vmware.api [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778789, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.841135] env[62619]: DEBUG nova.scheduler.client.report [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2106.930465] env[62619]: DEBUG oslo_vmware.api [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Task: {'id': task-1778789, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217731} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2106.930653] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2106.930831] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2106.931000] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2106.931216] env[62619]: INFO nova.compute.manager [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2106.931454] env[62619]: DEBUG oslo.service.loopingcall [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2106.931641] env[62619]: DEBUG nova.compute.manager [-] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2106.931733] env[62619]: DEBUG nova.network.neutron [-] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2107.345933] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.620s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2107.348523] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.455s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2107.350106] env[62619]: INFO nova.compute.claims [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2107.366634] env[62619]: INFO nova.scheduler.client.report [None 
req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Deleted allocations for instance e599f5ad-1b4d-4912-9b96-2544c52b0acf [ 2107.399047] env[62619]: DEBUG nova.compute.manager [req-f2001ae0-b489-4c56-93cb-a556152beb8d req-b6f61c64-0650-4a64-833c-8fde5ab5410d service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Received event network-vif-deleted-3c71631e-780b-40b4-b5d0-47a37178aa17 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2107.399201] env[62619]: INFO nova.compute.manager [req-f2001ae0-b489-4c56-93cb-a556152beb8d req-b6f61c64-0650-4a64-833c-8fde5ab5410d service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Neutron deleted interface 3c71631e-780b-40b4-b5d0-47a37178aa17; detaching it from the instance and deleting it from the info cache [ 2107.399709] env[62619]: DEBUG nova.network.neutron [req-f2001ae0-b489-4c56-93cb-a556152beb8d req-b6f61c64-0650-4a64-833c-8fde5ab5410d service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2107.876301] env[62619]: DEBUG nova.network.neutron [-] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2107.880625] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4260d732-8e03-4f6e-8111-f636f4e9cc3a tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e599f5ad-1b4d-4912-9b96-2544c52b0acf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.770s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2107.903048] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8a1623c-dfd2-47c5-9c39-1b8636b7da48 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.913696] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f5ad41-4620-4c54-a2b3-1a78bd137080 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.938530] env[62619]: DEBUG nova.compute.manager [req-f2001ae0-b489-4c56-93cb-a556152beb8d req-b6f61c64-0650-4a64-833c-8fde5ab5410d service nova] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Detach interface failed, port_id=3c71631e-780b-40b4-b5d0-47a37178aa17, reason: Instance 424da668-f458-44a6-9b38-e6c5db352a01 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2108.384058] env[62619]: INFO nova.compute.manager [-] [instance: 424da668-f458-44a6-9b38-e6c5db352a01] Took 1.45 seconds to deallocate network for instance. 
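The inventory payload that the scheduler report client keeps logging for provider e814b747-ed75-487b-a97d-acf66bc6db0b maps onto schedulable capacity through placement's overcommit rule, capacity = (total - reserved) * allocation_ratio. The snippet below is a standalone illustration with the values copied from these log lines; it is not Nova or placement code.

# Inventory values copied from the report-client log lines above/below.
INVENTORY = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1,
                  'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1,
                  'max_unit': 164,   'step_size': 1, 'allocation_ratio': 1.0},
}

def capacity(inv):
    """Overcommit-adjusted capacity for one resource class."""
    return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

for rc, inv in INVENTORY.items():
    print(f"{rc}: capacity={capacity(inv)} "
          f"(max {inv['max_unit']} per allocation, step {inv['step_size']})")
# Prints: VCPU capacity=192, MEMORY_MB capacity=196078, DISK_GB capacity=400.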
[ 2108.403777] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae689ce-002e-4676-9547-ad08ec575db8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.412033] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1e3bd6-d7f0-4afb-9bbd-c14922dff29e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.444978] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e530ac-664c-4614-8ca9-0beeec3b7762 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.453484] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e1482a-d656-4997-8706-ed95f288ff6d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.469083] env[62619]: DEBUG nova.compute.provider_tree [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2108.889528] env[62619]: DEBUG oslo_concurrency.lockutils [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2108.975027] env[62619]: DEBUG nova.scheduler.client.report [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2109.477896] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.130s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2109.478466] env[62619]: DEBUG nova.compute.manager [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2109.481641] env[62619]: DEBUG oslo_concurrency.lockutils [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.592s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2109.481882] env[62619]: DEBUG nova.objects.instance [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lazy-loading 'resources' on Instance uuid 424da668-f458-44a6-9b38-e6c5db352a01 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2109.984776] env[62619]: DEBUG nova.compute.utils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2109.986242] env[62619]: DEBUG nova.compute.manager [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2109.986417] env[62619]: DEBUG nova.network.neutron [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2110.032409] env[62619]: DEBUG nova.policy [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53deb71781e14136bff2b0b6c6a82890', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2c7470712c14fa9bc1804ae2431107b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 2110.039374] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04219abc-850e-4ddb-8233-53ebd2ec8883 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.046972] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8678597b-75ec-44f4-9a7e-5a6067c69043 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.078571] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106f2b0e-8c56-498a-9344-e4a3db38e3e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.085869] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ce7c1fbc-3d7e-4d4f-9d5c-fc49c19c0ddc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.099209] env[62619]: DEBUG nova.compute.provider_tree [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2110.173830] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "98b58e56-1249-42e7-a421-c836340cc4d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2110.173830] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "98b58e56-1249-42e7-a421-c836340cc4d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2110.327370] env[62619]: DEBUG nova.network.neutron [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Successfully created port: f8d4076b-f0ea-4483-a42a-79e288c9c6ca {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2110.494029] env[62619]: DEBUG nova.compute.manager [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2110.602642] env[62619]: DEBUG nova.scheduler.client.report [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2110.676369] env[62619]: DEBUG nova.compute.manager [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2111.107809] env[62619]: DEBUG oslo_concurrency.lockutils [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.626s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.166446] env[62619]: INFO nova.scheduler.client.report [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Deleted allocations for instance 424da668-f458-44a6-9b38-e6c5db352a01 [ 2111.193772] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2111.194029] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2111.195597] env[62619]: INFO nova.compute.claims [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2111.501994] env[62619]: DEBUG nova.compute.manager [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2111.530023] env[62619]: DEBUG nova.virt.hardware [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2111.530298] env[62619]: DEBUG nova.virt.hardware [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2111.530463] env[62619]: DEBUG nova.virt.hardware [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2111.530653] env[62619]: DEBUG nova.virt.hardware [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2111.530802] env[62619]: DEBUG nova.virt.hardware [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2111.530947] env[62619]: DEBUG nova.virt.hardware [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2111.531171] env[62619]: DEBUG nova.virt.hardware [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2111.531378] env[62619]: DEBUG nova.virt.hardware [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2111.531572] env[62619]: DEBUG nova.virt.hardware [None 
req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2111.531745] env[62619]: DEBUG nova.virt.hardware [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2111.531926] env[62619]: DEBUG nova.virt.hardware [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2111.532831] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46c02bc-6c73-4a9e-a1d9-fb8cbc653365 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.541144] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a058e3cb-6282-4ee6-a7a0-08c6abdb7b3a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.674654] env[62619]: DEBUG oslo_concurrency.lockutils [None req-afd5c262-f676-421f-bf79-3c4cd167062d tempest-AttachVolumeTestJSON-711873483 tempest-AttachVolumeTestJSON-711873483-project-member] Lock "424da668-f458-44a6-9b38-e6c5db352a01" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.401s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.798677] env[62619]: DEBUG nova.compute.manager [req-f8b85928-6d00-4aba-a76a-1343c0e5c0c3 req-e4930b9a-2e49-4c28-b6dc-36bad32ed6f0 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Received event network-vif-plugged-f8d4076b-f0ea-4483-a42a-79e288c9c6ca {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2111.798815] env[62619]: DEBUG oslo_concurrency.lockutils [req-f8b85928-6d00-4aba-a76a-1343c0e5c0c3 req-e4930b9a-2e49-4c28-b6dc-36bad32ed6f0 service nova] Acquiring lock "1a2a66f3-1fe8-4b26-a3e2-8083815e0427-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2111.799032] env[62619]: DEBUG oslo_concurrency.lockutils [req-f8b85928-6d00-4aba-a76a-1343c0e5c0c3 req-e4930b9a-2e49-4c28-b6dc-36bad32ed6f0 service nova] Lock "1a2a66f3-1fe8-4b26-a3e2-8083815e0427-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2111.799217] env[62619]: DEBUG oslo_concurrency.lockutils [req-f8b85928-6d00-4aba-a76a-1343c0e5c0c3 req-e4930b9a-2e49-4c28-b6dc-36bad32ed6f0 service nova] Lock "1a2a66f3-1fe8-4b26-a3e2-8083815e0427-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.799619] 
env[62619]: DEBUG nova.compute.manager [req-f8b85928-6d00-4aba-a76a-1343c0e5c0c3 req-e4930b9a-2e49-4c28-b6dc-36bad32ed6f0 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] No waiting events found dispatching network-vif-plugged-f8d4076b-f0ea-4483-a42a-79e288c9c6ca {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2111.799810] env[62619]: WARNING nova.compute.manager [req-f8b85928-6d00-4aba-a76a-1343c0e5c0c3 req-e4930b9a-2e49-4c28-b6dc-36bad32ed6f0 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Received unexpected event network-vif-plugged-f8d4076b-f0ea-4483-a42a-79e288c9c6ca for instance with vm_state building and task_state spawning. [ 2111.891160] env[62619]: DEBUG nova.network.neutron [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Successfully updated port: f8d4076b-f0ea-4483-a42a-79e288c9c6ca {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2112.241831] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f01e5fa-0024-4369-aa9c-e72a7df3d8b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.249920] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-957927e8-888a-438b-9c6c-cfb8b9a66e6f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.279409] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2240e6a8-2b68-41d7-b3e8-dea608c9f8ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.286342] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68732b7e-5c19-4551-8b30-13c00ea83916 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.298842] env[62619]: DEBUG nova.compute.provider_tree [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2112.394211] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2112.394435] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2112.394499] env[62619]: DEBUG nova.network.neutron [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 
1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2112.801666] env[62619]: DEBUG nova.scheduler.client.report [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2112.927074] env[62619]: DEBUG nova.network.neutron [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2113.063921] env[62619]: DEBUG nova.network.neutron [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance_info_cache with network_info: [{"id": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "address": "fa:16:3e:14:50:cc", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8d4076b-f0", "ovs_interfaceid": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2113.306427] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.112s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.306953] env[62619]: DEBUG nova.compute.manager [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Start 
building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2113.566240] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2113.566529] env[62619]: DEBUG nova.compute.manager [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Instance network_info: |[{"id": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "address": "fa:16:3e:14:50:cc", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8d4076b-f0", "ovs_interfaceid": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2113.566954] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:50:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cebc48c-6a77-46bf-9c12-ac130e4d7d76', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8d4076b-f0ea-4483-a42a-79e288c9c6ca', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2113.574375] env[62619]: DEBUG oslo.service.loopingcall [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
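
The inventory reported above for provider e814b747-ed75-487b-a97d-acf66bc6db0b determines schedulable capacity via the usual Placement formula (total - reserved) * allocation_ratio, so 48 host VCPUs at a 4.0 allocation ratio yield 192 schedulable VCPUs. A minimal illustrative sketch (not Nova's own code) that reproduces the numbers from this log:

```python
# Illustrative only: effective capacity per resource class, using the
# inventory values reported for provider e814b747 in this log.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

def effective_capacity(inv: dict) -> float:
    # capacity = (total - reserved) * allocation_ratio
    return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

for rc, inv in inventory.items():
    print(rc, effective_capacity(inv))
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```
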
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2113.574573] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2113.574788] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54ab260e-2450-43b6-91b0-eb89cf0a9114 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.593977] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2113.593977] env[62619]: value = "task-1778791" [ 2113.593977] env[62619]: _type = "Task" [ 2113.593977] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.601208] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778791, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.812064] env[62619]: DEBUG nova.compute.utils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2113.813515] env[62619]: DEBUG nova.compute.manager [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2113.813685] env[62619]: DEBUG nova.network.neutron [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2113.822204] env[62619]: DEBUG nova.compute.manager [req-a2a0b924-e33e-4c4c-9551-ed7818bad263 req-e3b9ffe7-de73-487f-b415-30a83a347a03 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Received event network-changed-f8d4076b-f0ea-4483-a42a-79e288c9c6ca {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2113.822447] env[62619]: DEBUG nova.compute.manager [req-a2a0b924-e33e-4c4c-9551-ed7818bad263 req-e3b9ffe7-de73-487f-b415-30a83a347a03 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Refreshing instance network info cache due to event network-changed-f8d4076b-f0ea-4483-a42a-79e288c9c6ca. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2113.822671] env[62619]: DEBUG oslo_concurrency.lockutils [req-a2a0b924-e33e-4c4c-9551-ed7818bad263 req-e3b9ffe7-de73-487f-b415-30a83a347a03 service nova] Acquiring lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2113.822811] env[62619]: DEBUG oslo_concurrency.lockutils [req-a2a0b924-e33e-4c4c-9551-ed7818bad263 req-e3b9ffe7-de73-487f-b415-30a83a347a03 service nova] Acquired lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2113.822976] env[62619]: DEBUG nova.network.neutron [req-a2a0b924-e33e-4c4c-9551-ed7818bad263 req-e3b9ffe7-de73-487f-b415-30a83a347a03 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Refreshing network info cache for port f8d4076b-f0ea-4483-a42a-79e288c9c6ca {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2113.860195] env[62619]: DEBUG nova.policy [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a13407c9bfc448bb27a06680d41afb2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72ae04936c9b4ea19b5d7fac78c96ba4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 2114.104229] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778791, 'name': CreateVM_Task, 'duration_secs': 0.36543} completed successfully. 
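
The CreateVM_Task entries above show the usual oslo.vmware flow: the API call returns a task reference, which is then polled until it reports completion (here after 0.36543s). A simplified, hypothetical poll loop in the same spirit; `fetch_task_info` and its return shape are placeholders, not the real oslo.vmware API:

```python
import time

def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=300):
    """Poll a task until it finishes (hypothetical sketch).

    fetch_task_info(task_id) is assumed to return a dict with
    'state' in {'queued', 'running', 'success', 'error'} and 'progress'.
    """
    start = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        if info["state"] == "success":
            return {"id": task_id, "duration_secs": time.monotonic() - start}
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {task_id} did not finish in {timeout}s")
        print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
```
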
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.104318] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2114.105091] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2114.105267] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2114.105608] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2114.105915] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db33ddd6-08b3-4f5a-b9c1-39c6f4bfa273 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.110687] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2114.110687] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5279feec-9611-89a2-5c0c-09638d35c7ee" [ 2114.110687] env[62619]: _type = "Task" [ 2114.110687] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.118993] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5279feec-9611-89a2-5c0c-09638d35c7ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.130703] env[62619]: DEBUG nova.network.neutron [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Successfully created port: 862f8881-6c2c-413b-b5f6-eb389ced9b21 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2114.317108] env[62619]: DEBUG nova.compute.manager [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2114.512424] env[62619]: DEBUG nova.network.neutron [req-a2a0b924-e33e-4c4c-9551-ed7818bad263 req-e3b9ffe7-de73-487f-b415-30a83a347a03 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updated VIF entry in instance network info cache for port f8d4076b-f0ea-4483-a42a-79e288c9c6ca. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2114.512770] env[62619]: DEBUG nova.network.neutron [req-a2a0b924-e33e-4c4c-9551-ed7818bad263 req-e3b9ffe7-de73-487f-b415-30a83a347a03 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance_info_cache with network_info: [{"id": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "address": "fa:16:3e:14:50:cc", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8d4076b-f0", "ovs_interfaceid": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2114.620433] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5279feec-9611-89a2-5c0c-09638d35c7ee, 'name': SearchDatastore_Task, 'duration_secs': 0.009963} completed successfully. 
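
The instance_info_cache entries above are stored as a JSON list of VIF dicts, so fields such as the MAC, fixed IP and MTU can be read straight out of that structure. A small self-contained sketch using a trimmed-down copy of the VIF shown in this log (only the fields the sketch reads are kept):

```python
import json

# Trimmed copy of the cached VIF for port f8d4076b-f0ea-4483-a42a-79e288c9c6ca.
vif_json = '''
[{"id": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca",
  "address": "fa:16:3e:14:50:cc",
  "devname": "tapf8d4076b-f0",
  "network": {"label": "tempest-ServerActionsTestJSON-940473749-network",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.11", "type": "fixed"}]}],
              "meta": {"mtu": 8950}}}]
'''

for vif in json.loads(vif_json):
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], fixed_ips, vif["network"]["meta"]["mtu"])
```
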
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.620727] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2114.620951] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2114.621190] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2114.621360] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2114.621551] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2114.621796] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0c5b329-2a49-4146-86ff-0bfdce31f833 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.632749] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2114.632911] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2114.633624] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c645efa-ace5-4479-8876-e66c1e53afa8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.638308] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2114.638308] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a4bbdb-406a-488e-1eeb-db70ae8869f6" [ 2114.638308] env[62619]: _type = "Task" [ 2114.638308] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.646650] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a4bbdb-406a-488e-1eeb-db70ae8869f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.015142] env[62619]: DEBUG oslo_concurrency.lockutils [req-a2a0b924-e33e-4c4c-9551-ed7818bad263 req-e3b9ffe7-de73-487f-b415-30a83a347a03 service nova] Releasing lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2115.149135] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52a4bbdb-406a-488e-1eeb-db70ae8869f6, 'name': SearchDatastore_Task, 'duration_secs': 0.016132} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.149917] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07b75c60-f944-4345-b311-cc42ad92d7ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.155735] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2115.155735] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5285fb5d-5ad8-987a-7c8e-493c37e3cd4c" [ 2115.155735] env[62619]: _type = "Task" [ 2115.155735] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.163198] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5285fb5d-5ad8-987a-7c8e-493c37e3cd4c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.326800] env[62619]: DEBUG nova.compute.manager [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2115.352610] env[62619]: DEBUG nova.virt.hardware [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2115.352857] env[62619]: DEBUG nova.virt.hardware [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2115.353022] env[62619]: DEBUG nova.virt.hardware [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2115.353208] env[62619]: DEBUG nova.virt.hardware [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2115.353352] env[62619]: DEBUG nova.virt.hardware [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2115.353496] env[62619]: DEBUG nova.virt.hardware [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2115.353697] env[62619]: DEBUG nova.virt.hardware [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2115.353919] env[62619]: DEBUG nova.virt.hardware [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2115.354118] env[62619]: DEBUG nova.virt.hardware [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2115.354281] env[62619]: DEBUG nova.virt.hardware [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2115.354450] env[62619]: DEBUG nova.virt.hardware [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2115.355326] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ed4d4b-28ee-4c0e-ba92-9e305ed1eca8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.363100] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca392a33-3699-41f7-a5ba-1fda2c95cd03 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.592256] env[62619]: DEBUG nova.network.neutron [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Successfully updated port: 862f8881-6c2c-413b-b5f6-eb389ced9b21 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2115.665874] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]5285fb5d-5ad8-987a-7c8e-493c37e3cd4c, 'name': SearchDatastore_Task, 'duration_secs': 0.009147} completed successfully. 
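
The hardware.py entries above walk the standard topology search for the m1.nano flavor: 1 vCPU with no flavor or image limits, so the effective maxima default to 65536 per dimension and the only valid topology is 1 socket x 1 core x 1 thread. A rough approximation of that enumeration (not Nova's actual _get_possible_cpu_topologies, which is more involved):

```python
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) combinations whose product equals the
    # requested vCPU count while respecting the per-dimension maxima.
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append(VirtCPUTopology(s, c, t))
    return found

print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```
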
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.666090] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2115.666347] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 1a2a66f3-1fe8-4b26-a3e2-8083815e0427/1a2a66f3-1fe8-4b26-a3e2-8083815e0427.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2115.666604] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed512452-a6b7-425c-bdca-645bcdd51d57 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.673623] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2115.673623] env[62619]: value = "task-1778793" [ 2115.673623] env[62619]: _type = "Task" [ 2115.673623] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.680941] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778793, 'name': CopyVirtualDisk_Task} progress is 0%. 
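
The sequence from "Processing image 27a858d5-..." through the CopyVirtualDisk_Task above is the vmwareapi image-cache pattern visible in this log: take a lock on the cached VMDK path, check whether it already exists on the datastore, then copy it from the cache folder into the new instance's folder. A hedged sketch of that flow; search_datastore, copy_virtual_disk and fetch_image are placeholder callables, not the real vm_util helpers:

```python
from collections import defaultdict
from threading import Lock

_path_locks = defaultdict(Lock)  # per-datastore-path locks, like the lock names in the log

def ensure_instance_disk(search_datastore, copy_virtual_disk, fetch_image,
                         image_id, instance_uuid, datastore="datastore1"):
    cache_path = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    target_path = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    with _path_locks[cache_path]:
        if not search_datastore(cache_path):
            # Cache miss: download the image into the cache folder first.
            fetch_image(image_id, cache_path)
    # Copy the cached sparse disk into the instance folder (CopyVirtualDisk_Task).
    copy_virtual_disk(cache_path, target_path)
    return target_path
```
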
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.854236] env[62619]: DEBUG nova.compute.manager [req-d57fbef0-f146-42b1-b417-c33c1de0813c req-de5dfcc6-542e-4593-86cb-b50297d2505a service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Received event network-vif-plugged-862f8881-6c2c-413b-b5f6-eb389ced9b21 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2115.854611] env[62619]: DEBUG oslo_concurrency.lockutils [req-d57fbef0-f146-42b1-b417-c33c1de0813c req-de5dfcc6-542e-4593-86cb-b50297d2505a service nova] Acquiring lock "98b58e56-1249-42e7-a421-c836340cc4d4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2115.854963] env[62619]: DEBUG oslo_concurrency.lockutils [req-d57fbef0-f146-42b1-b417-c33c1de0813c req-de5dfcc6-542e-4593-86cb-b50297d2505a service nova] Lock "98b58e56-1249-42e7-a421-c836340cc4d4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2115.855270] env[62619]: DEBUG oslo_concurrency.lockutils [req-d57fbef0-f146-42b1-b417-c33c1de0813c req-de5dfcc6-542e-4593-86cb-b50297d2505a service nova] Lock "98b58e56-1249-42e7-a421-c836340cc4d4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2115.855537] env[62619]: DEBUG nova.compute.manager [req-d57fbef0-f146-42b1-b417-c33c1de0813c req-de5dfcc6-542e-4593-86cb-b50297d2505a service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] No waiting events found dispatching network-vif-plugged-862f8881-6c2c-413b-b5f6-eb389ced9b21 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2115.855799] env[62619]: WARNING nova.compute.manager [req-d57fbef0-f146-42b1-b417-c33c1de0813c req-de5dfcc6-542e-4593-86cb-b50297d2505a service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Received unexpected event network-vif-plugged-862f8881-6c2c-413b-b5f6-eb389ced9b21 for instance with vm_state building and task_state spawning. [ 2115.856099] env[62619]: DEBUG nova.compute.manager [req-d57fbef0-f146-42b1-b417-c33c1de0813c req-de5dfcc6-542e-4593-86cb-b50297d2505a service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Received event network-changed-862f8881-6c2c-413b-b5f6-eb389ced9b21 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2115.856387] env[62619]: DEBUG nova.compute.manager [req-d57fbef0-f146-42b1-b417-c33c1de0813c req-de5dfcc6-542e-4593-86cb-b50297d2505a service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Refreshing instance network info cache due to event network-changed-862f8881-6c2c-413b-b5f6-eb389ced9b21.
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2115.856715] env[62619]: DEBUG oslo_concurrency.lockutils [req-d57fbef0-f146-42b1-b417-c33c1de0813c req-de5dfcc6-542e-4593-86cb-b50297d2505a service nova] Acquiring lock "refresh_cache-98b58e56-1249-42e7-a421-c836340cc4d4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2115.856955] env[62619]: DEBUG oslo_concurrency.lockutils [req-d57fbef0-f146-42b1-b417-c33c1de0813c req-de5dfcc6-542e-4593-86cb-b50297d2505a service nova] Acquired lock "refresh_cache-98b58e56-1249-42e7-a421-c836340cc4d4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2115.857252] env[62619]: DEBUG nova.network.neutron [req-d57fbef0-f146-42b1-b417-c33c1de0813c req-de5dfcc6-542e-4593-86cb-b50297d2505a service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Refreshing network info cache for port 862f8881-6c2c-413b-b5f6-eb389ced9b21 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2116.095161] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "refresh_cache-98b58e56-1249-42e7-a421-c836340cc4d4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2116.184204] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778793, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.389794] env[62619]: DEBUG nova.network.neutron [req-d57fbef0-f146-42b1-b417-c33c1de0813c req-de5dfcc6-542e-4593-86cb-b50297d2505a service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2116.459107] env[62619]: DEBUG nova.network.neutron [req-d57fbef0-f146-42b1-b417-c33c1de0813c req-de5dfcc6-542e-4593-86cb-b50297d2505a service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2116.683946] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778793, 'name': CopyVirtualDisk_Task} progress is 25%. 
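
The neutron events received a few entries above ("network-vif-plugged-<port-id>", "network-changed-<port-id>") are keyed by event name plus port tag; the compute manager pops any waiter registered under that key, and when nothing is waiting (as here, while the instance is still building) the event is logged as unexpected and the network-changed event only triggers a cache refresh. A toy sketch of that keying, with hypothetical names:

```python
# Toy sketch of the event keying visible in the log: events are identified by
# "<name>-<tag>", e.g. "network-vif-plugged-862f8881-6c2c-413b-b5f6-eb389ced9b21".
waiters = {}  # (instance_uuid, event_key) -> callback

def make_event_key(name: str, tag: str) -> str:
    return f"{name}-{tag}"

def pop_instance_event(instance_uuid: str, name: str, tag: str):
    key = (instance_uuid, make_event_key(name, tag))
    callback = waiters.pop(key, None)
    if callback is None:
        print(f"No waiting events found dispatching {make_event_key(name, tag)}")
    return callback

pop_instance_event("98b58e56-1249-42e7-a421-c836340cc4d4",
                   "network-vif-plugged",
                   "862f8881-6c2c-413b-b5f6-eb389ced9b21")
```
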
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.961932] env[62619]: DEBUG oslo_concurrency.lockutils [req-d57fbef0-f146-42b1-b417-c33c1de0813c req-de5dfcc6-542e-4593-86cb-b50297d2505a service nova] Releasing lock "refresh_cache-98b58e56-1249-42e7-a421-c836340cc4d4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2116.962351] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired lock "refresh_cache-98b58e56-1249-42e7-a421-c836340cc4d4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2116.962513] env[62619]: DEBUG nova.network.neutron [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2117.184791] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778793, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.492372] env[62619]: DEBUG nova.network.neutron [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2117.616488] env[62619]: DEBUG nova.network.neutron [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Updating instance_info_cache with network_info: [{"id": "862f8881-6c2c-413b-b5f6-eb389ced9b21", "address": "fa:16:3e:ed:e9:ee", "network": {"id": "0fdc1539-eb35-4283-99e8-fcc5a7a64110", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2082542751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ae04936c9b4ea19b5d7fac78c96ba4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap862f8881-6c", "ovs_interfaceid": "862f8881-6c2c-413b-b5f6-eb389ced9b21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2117.685355] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778793, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.119582] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Releasing lock "refresh_cache-98b58e56-1249-42e7-a421-c836340cc4d4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.119922] env[62619]: DEBUG nova.compute.manager [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Instance network_info: |[{"id": "862f8881-6c2c-413b-b5f6-eb389ced9b21", "address": "fa:16:3e:ed:e9:ee", "network": {"id": "0fdc1539-eb35-4283-99e8-fcc5a7a64110", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2082542751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ae04936c9b4ea19b5d7fac78c96ba4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap862f8881-6c", "ovs_interfaceid": "862f8881-6c2c-413b-b5f6-eb389ced9b21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2118.120363] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:e9:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '862f8881-6c2c-413b-b5f6-eb389ced9b21', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2118.127858] env[62619]: DEBUG oslo.service.loopingcall [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2118.128084] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2118.128314] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a37d6d5a-c682-4ae3-95b0-730ee67ae179 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.147899] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2118.147899] env[62619]: value = "task-1778795" [ 2118.147899] env[62619]: _type = "Task" [ 2118.147899] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.161211] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778795, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.188126] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778793, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.364499} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.188542] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 1a2a66f3-1fe8-4b26-a3e2-8083815e0427/1a2a66f3-1fe8-4b26-a3e2-8083815e0427.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2118.188692] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2118.188983] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1825a512-04f6-4df9-898b-ec85980a7ddf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.195297] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2118.195297] env[62619]: value = "task-1778796" [ 2118.195297] env[62619]: _type = "Task" [ 2118.195297] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.205475] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778796, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.658637] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778795, 'name': CreateVM_Task, 'duration_secs': 0.325051} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.658816] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2118.659412] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2118.659578] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2118.659902] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2118.660160] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a640afe5-2b1e-4d9c-8b81-929b295b1bf0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.664502] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2118.664502] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52923655-c1ca-7d0d-2344-011cee3fb2f4" [ 2118.664502] env[62619]: _type = "Task" [ 2118.664502] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.672331] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52923655-c1ca-7d0d-2344-011cee3fb2f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.702783] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778796, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059104} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.703033] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2118.703732] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76827453-b7f2-40fa-b8e4-b02133d154d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.724504] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 1a2a66f3-1fe8-4b26-a3e2-8083815e0427/1a2a66f3-1fe8-4b26-a3e2-8083815e0427.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2118.724677] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8f491a1-b186-4361-a4c4-9205db3a5768 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.743409] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2118.743409] env[62619]: value = "task-1778797" [ 2118.743409] env[62619]: _type = "Task" [ 2118.743409] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.750566] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778797, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.174950] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52923655-c1ca-7d0d-2344-011cee3fb2f4, 'name': SearchDatastore_Task, 'duration_secs': 0.008795} completed successfully. 
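
"Extending root virtual disk to 1048576" a few entries above is the flavor's root disk expressed in KiB: m1.nano has root_gb=1, and 1 GiB = 1024 * 1024 KiB = 1048576. A one-liner making the unit conversion explicit:

```python
root_gb = 1                               # m1.nano root disk size from the flavor shown above
requested_size_kb = root_gb * 1024 * 1024
assert requested_size_kb == 1048576       # matches "Extending root virtual disk to 1048576"
```
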
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.175318] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2119.175517] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2119.176055] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2119.176055] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2119.176177] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2119.176375] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-efbbeaca-6293-4654-bd56-0c55fef2dadf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.184184] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2119.184389] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2119.185092] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d08c453-e7a3-434e-823d-d2260e2503bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.189656] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2119.189656] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522578de-1853-3dd2-16fe-b78f35a73cf6" [ 2119.189656] env[62619]: _type = "Task" [ 2119.189656] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.197952] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522578de-1853-3dd2-16fe-b78f35a73cf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.252113] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778797, 'name': ReconfigVM_Task, 'duration_secs': 0.40736} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.252381] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 1a2a66f3-1fe8-4b26-a3e2-8083815e0427/1a2a66f3-1fe8-4b26-a3e2-8083815e0427.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2119.252933] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-162bf634-8199-4bc0-a77b-af7192b5dad7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.259192] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2119.259192] env[62619]: value = "task-1778798" [ 2119.259192] env[62619]: _type = "Task" [ 2119.259192] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.266464] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778798, 'name': Rename_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.699934] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]522578de-1853-3dd2-16fe-b78f35a73cf6, 'name': SearchDatastore_Task, 'duration_secs': 0.011078} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.700685] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ae16753-ec5e-4d25-a270-cc962a039200 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.705881] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2119.705881] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526368e1-a84c-187d-47c3-94bce58671b9" [ 2119.705881] env[62619]: _type = "Task" [ 2119.705881] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.713137] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526368e1-a84c-187d-47c3-94bce58671b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.766764] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778798, 'name': Rename_Task, 'duration_secs': 0.138035} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.767014] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2119.767243] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61d7b002-a9c3-4fd4-a033-a2018b636004 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.772717] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2119.772717] env[62619]: value = "task-1778799" [ 2119.772717] env[62619]: _type = "Task" [ 2119.772717] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.780883] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778799, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.217084] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526368e1-a84c-187d-47c3-94bce58671b9, 'name': SearchDatastore_Task, 'duration_secs': 0.009898} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.217456] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2120.217657] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 98b58e56-1249-42e7-a421-c836340cc4d4/98b58e56-1249-42e7-a421-c836340cc4d4.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2120.217935] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2160b749-7358-4fc9-b0f6-f3ef7658f513 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.224042] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2120.224042] env[62619]: value = "task-1778800" [ 2120.224042] env[62619]: _type = "Task" [ 2120.224042] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.231750] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778800, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.280872] env[62619]: DEBUG oslo_vmware.api [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778799, 'name': PowerOnVM_Task, 'duration_secs': 0.403256} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.281140] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2120.281370] env[62619]: INFO nova.compute.manager [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Took 8.78 seconds to spawn the instance on the hypervisor. [ 2120.281559] env[62619]: DEBUG nova.compute.manager [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2120.282369] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa148d7-8106-40ad-8fa0-26275a4b06c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.709044] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2120.710934] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 2120.738534] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778800, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.803193] env[62619]: INFO nova.compute.manager [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Took 14.93 seconds to build instance. [ 2121.202233] env[62619]: DEBUG nova.compute.manager [req-a5184321-c8ec-4ae8-b053-43d68ddd41c3 req-5bca3c9e-c0e5-4628-8c13-c8748a53db54 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Received event network-changed-f8d4076b-f0ea-4483-a42a-79e288c9c6ca {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2121.202474] env[62619]: DEBUG nova.compute.manager [req-a5184321-c8ec-4ae8-b053-43d68ddd41c3 req-5bca3c9e-c0e5-4628-8c13-c8748a53db54 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Refreshing instance network info cache due to event network-changed-f8d4076b-f0ea-4483-a42a-79e288c9c6ca. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2121.202736] env[62619]: DEBUG oslo_concurrency.lockutils [req-a5184321-c8ec-4ae8-b053-43d68ddd41c3 req-5bca3c9e-c0e5-4628-8c13-c8748a53db54 service nova] Acquiring lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2121.202851] env[62619]: DEBUG oslo_concurrency.lockutils [req-a5184321-c8ec-4ae8-b053-43d68ddd41c3 req-5bca3c9e-c0e5-4628-8c13-c8748a53db54 service nova] Acquired lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2121.203009] env[62619]: DEBUG nova.network.neutron [req-a5184321-c8ec-4ae8-b053-43d68ddd41c3 req-5bca3c9e-c0e5-4628-8c13-c8748a53db54 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Refreshing network info cache for port f8d4076b-f0ea-4483-a42a-79e288c9c6ca {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2121.236211] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778800, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599507} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.236526] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 98b58e56-1249-42e7-a421-c836340cc4d4/98b58e56-1249-42e7-a421-c836340cc4d4.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2121.236771] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2121.237369] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90b091d0-d1cb-4154-ba32-c46e9b547b1e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.244337] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2121.244337] env[62619]: value = "task-1778801" [ 2121.244337] env[62619]: _type = "Task" [ 2121.244337] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.252497] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778801, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.305294] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b3a3334c-047b-4833-a9fb-33406186f0c6 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "1a2a66f3-1fe8-4b26-a3e2-8083815e0427" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.436s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2121.755243] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778801, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072183} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.755562] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2121.756448] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62cd0081-d7f5-4de7-b453-382e0011aabf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.779678] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 98b58e56-1249-42e7-a421-c836340cc4d4/98b58e56-1249-42e7-a421-c836340cc4d4.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2121.780603] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e30821e2-8c48-4980-8f8c-72cccb0a1036 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.799379] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2121.799379] env[62619]: value = "task-1778802" [ 2121.799379] env[62619]: _type = "Task" [ 2121.799379] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.808158] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778802, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.009652] env[62619]: DEBUG nova.network.neutron [req-a5184321-c8ec-4ae8-b053-43d68ddd41c3 req-5bca3c9e-c0e5-4628-8c13-c8748a53db54 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updated VIF entry in instance network info cache for port f8d4076b-f0ea-4483-a42a-79e288c9c6ca. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2122.010126] env[62619]: DEBUG nova.network.neutron [req-a5184321-c8ec-4ae8-b053-43d68ddd41c3 req-5bca3c9e-c0e5-4628-8c13-c8748a53db54 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance_info_cache with network_info: [{"id": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "address": "fa:16:3e:14:50:cc", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8d4076b-f0", "ovs_interfaceid": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2122.309890] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778802, 'name': ReconfigVM_Task, 'duration_secs': 0.297681} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.310346] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 98b58e56-1249-42e7-a421-c836340cc4d4/98b58e56-1249-42e7-a421-c836340cc4d4.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2122.310809] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77ea98ed-d7bb-454d-a2b3-cf1eb1818f6e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.317319] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2122.317319] env[62619]: value = "task-1778803" [ 2122.317319] env[62619]: _type = "Task" [ 2122.317319] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.325570] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778803, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.513477] env[62619]: DEBUG oslo_concurrency.lockutils [req-a5184321-c8ec-4ae8-b053-43d68ddd41c3 req-5bca3c9e-c0e5-4628-8c13-c8748a53db54 service nova] Releasing lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2122.827885] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778803, 'name': Rename_Task, 'duration_secs': 0.126664} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.828180] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2122.828429] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-965e9b13-c41d-49cc-9958-f1284e061d3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.834893] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2122.834893] env[62619]: value = "task-1778804" [ 2122.834893] env[62619]: _type = "Task" [ 2122.834893] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.842569] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778804, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.345740] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778804, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.848636] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778804, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.346699] env[62619]: DEBUG oslo_vmware.api [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778804, 'name': PowerOnVM_Task, 'duration_secs': 1.070902} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2124.348378] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2124.348378] env[62619]: INFO nova.compute.manager [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Took 9.02 seconds to spawn the instance on the hypervisor. [ 2124.348378] env[62619]: DEBUG nova.compute.manager [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2124.348589] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bae3e74-7a03-4448-8df4-cca9651d6e82 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.867014] env[62619]: INFO nova.compute.manager [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Took 13.69 seconds to build instance. 
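The task records above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, Rename_Task, PowerOnVM_Task) all pass through the same wait_for_task/_poll_task flow referenced at oslo_vmware/api.py:397/434/444: the vCenter task is invoked, its progress is logged at DEBUG until a terminal state is reached, and the completed record carries a duration_secs value. The sketch below is only an illustration of that polling pattern, not the oslo.vmware implementation; get_task_info and the dict it returns are hypothetical stand-ins for the task information vCenter exposes.

# Illustrative polling loop for the wait_for_task/_poll_task pattern seen above.
# Not the oslo.vmware code; get_task_info is a hypothetical callable.
import logging
import time

LOG = logging.getLogger(__name__)


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state."""


def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    """Poll get_task_info(task_id) until the task reaches a terminal state.

    get_task_info is assumed to return a dict with 'state' ('running',
    'success' or 'error'), an integer 'progress', and an optional 'error'
    message -- roughly the information a vCenter task object reports.
    """
    started = time.monotonic()
    while True:
        info = get_task_info(task_id)
        state = info.get('state')
        if state == 'success':
            # Mirrors "Task: {'id': ..., 'duration_secs': ...} completed successfully."
            LOG.debug("Task %s completed successfully in %.3fs",
                      task_id, time.monotonic() - started)
            return info
        if state == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        # Mirrors "Task: {'id': ..., 'name': ...} progress is N%."
        LOG.debug("Task %s progress is %s%%", task_id, info.get('progress', 0))
        time.sleep(poll_interval)

Read this way, the duration_secs values reported once each task completes (for example CopyVirtualDisk_Task at 0.599507s and PowerOnVM_Task at 1.070902s) correspond to the elapsed time measured across that polling loop.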
[ 2125.369408] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2197c4e-a54b-476d-b513-5d2125ab1e4f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "98b58e56-1249-42e7-a421-c836340cc4d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.196s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2125.518018] env[62619]: DEBUG nova.compute.manager [req-5029131b-b2b2-4e8d-8229-4c005328ec7b req-61169bb5-1a4c-4db5-895a-ff079a22080d service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Received event network-changed-862f8881-6c2c-413b-b5f6-eb389ced9b21 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2125.518352] env[62619]: DEBUG nova.compute.manager [req-5029131b-b2b2-4e8d-8229-4c005328ec7b req-61169bb5-1a4c-4db5-895a-ff079a22080d service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Refreshing instance network info cache due to event network-changed-862f8881-6c2c-413b-b5f6-eb389ced9b21. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2125.518634] env[62619]: DEBUG oslo_concurrency.lockutils [req-5029131b-b2b2-4e8d-8229-4c005328ec7b req-61169bb5-1a4c-4db5-895a-ff079a22080d service nova] Acquiring lock "refresh_cache-98b58e56-1249-42e7-a421-c836340cc4d4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2125.518845] env[62619]: DEBUG oslo_concurrency.lockutils [req-5029131b-b2b2-4e8d-8229-4c005328ec7b req-61169bb5-1a4c-4db5-895a-ff079a22080d service nova] Acquired lock "refresh_cache-98b58e56-1249-42e7-a421-c836340cc4d4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2125.519078] env[62619]: DEBUG nova.network.neutron [req-5029131b-b2b2-4e8d-8229-4c005328ec7b req-61169bb5-1a4c-4db5-895a-ff079a22080d service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Refreshing network info cache for port 862f8881-6c2c-413b-b5f6-eb389ced9b21 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2125.711543] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2125.711934] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2125.712260] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2126.233898] env[62619]: DEBUG nova.network.neutron [req-5029131b-b2b2-4e8d-8229-4c005328ec7b req-61169bb5-1a4c-4db5-895a-ff079a22080d service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Updated VIF entry in instance network info cache for port 862f8881-6c2c-413b-b5f6-eb389ced9b21. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2126.234281] env[62619]: DEBUG nova.network.neutron [req-5029131b-b2b2-4e8d-8229-4c005328ec7b req-61169bb5-1a4c-4db5-895a-ff079a22080d service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Updating instance_info_cache with network_info: [{"id": "862f8881-6c2c-413b-b5f6-eb389ced9b21", "address": "fa:16:3e:ed:e9:ee", "network": {"id": "0fdc1539-eb35-4283-99e8-fcc5a7a64110", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2082542751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ae04936c9b4ea19b5d7fac78c96ba4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap862f8881-6c", "ovs_interfaceid": "862f8881-6c2c-413b-b5f6-eb389ced9b21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2126.708883] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2126.709209] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 2126.709209] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 2126.736486] env[62619]: DEBUG oslo_concurrency.lockutils [req-5029131b-b2b2-4e8d-8229-4c005328ec7b req-61169bb5-1a4c-4db5-895a-ff079a22080d service nova] Releasing lock "refresh_cache-98b58e56-1249-42e7-a421-c836340cc4d4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2127.250615] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2127.250774] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquired lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2127.250921] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Forcefully 
refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2127.251080] env[62619]: DEBUG nova.objects.instance [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lazy-loading 'info_cache' on Instance uuid 1a2a66f3-1fe8-4b26-a3e2-8083815e0427 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2128.975172] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance_info_cache with network_info: [{"id": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "address": "fa:16:3e:14:50:cc", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8d4076b-f0", "ovs_interfaceid": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2129.477944] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Releasing lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2129.478189] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 2129.478409] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2129.478581] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2129.478735] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2129.982230] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2129.982650] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.982650] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2129.982794] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2129.984055] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03debc3e-b190-4151-ba0a-e650f728ebca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.992181] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d399e62-c302-4585-a3ae-b0ee09fdef4c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.005848] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3160a27b-0888-4745-b3a8-ea91c64e4553 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.011801] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9547a9-e93e-42fd-89a0-d08b875948de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.040636] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180270MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2130.040763] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2130.040948] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.190048] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 
1a2a66f3-1fe8-4b26-a3e2-8083815e0427 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2131.190048] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 98b58e56-1249-42e7-a421-c836340cc4d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2131.190048] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2131.190048] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2131.226039] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6999cfa0-b33d-4ee3-a71a-d5959fe65d8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.233695] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdae88aa-8ccc-445e-b62c-507fb50c7dd3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.262801] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9394797-2a60-4d02-9447-5f5c7702582c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.270120] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11baae4-61fc-45b2-b191-f0a8dddee72c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.284408] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2131.787679] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2132.292944] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated 
for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2132.293350] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.252s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.289439] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2133.289666] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.544101] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "98b58e56-1249-42e7-a421-c836340cc4d4" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2156.544402] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "98b58e56-1249-42e7-a421-c836340cc4d4" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2157.048472] env[62619]: DEBUG nova.compute.utils [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2157.551681] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "98b58e56-1249-42e7-a421-c836340cc4d4" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2158.352055] env[62619]: DEBUG nova.compute.manager [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Stashing vm_state: active {{(pid=62619) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 2158.613718] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "98b58e56-1249-42e7-a421-c836340cc4d4" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.614125] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "98b58e56-1249-42e7-a421-c836340cc4d4" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2158.614224] env[62619]: INFO nova.compute.manager [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Attaching volume 85b42ea8-5797-4cd9-af68-83c72279209a to /dev/sdb [ 2158.644204] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c9a81c-2226-4393-b464-0ce9da0c2da6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.651983] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed5edbb-253d-4c3e-bdf2-5fd79a0adaa3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.665232] env[62619]: DEBUG nova.virt.block_device [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Updating existing volume attachment record: 1a004afe-1a27-4f97-b69a-e5cc07303da1 {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2158.874030] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.874300] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2159.379457] env[62619]: INFO nova.compute.claims [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2159.885411] env[62619]: INFO nova.compute.resource_tracker [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating resource usage from migration bb68b72e-100c-4998-9191-cc50a3f3247f [ 2159.931364] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f968a3ce-5b92-4a30-a797-8154499c6ec5 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.938920] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb642985-d6d2-40f1-9f3d-5e9173f83592 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.968230] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc91e27-5622-4811-9d3a-aae837cb9b3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.975090] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67da0896-3dac-4ae6-8484-7f363f286ddc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.989057] env[62619]: DEBUG nova.compute.provider_tree [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2160.492369] env[62619]: DEBUG nova.scheduler.client.report [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2160.997056] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.123s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2160.997503] env[62619]: INFO nova.compute.manager [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Migrating [ 2161.511980] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2161.512215] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2161.512353] env[62619]: DEBUG nova.network.neutron [None 
req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2162.218401] env[62619]: DEBUG nova.network.neutron [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance_info_cache with network_info: [{"id": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "address": "fa:16:3e:14:50:cc", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8d4076b-f0", "ovs_interfaceid": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2162.722027] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2163.209293] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Volume attach. 
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2163.209576] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369210', 'volume_id': '85b42ea8-5797-4cd9-af68-83c72279209a', 'name': 'volume-85b42ea8-5797-4cd9-af68-83c72279209a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '98b58e56-1249-42e7-a421-c836340cc4d4', 'attached_at': '', 'detached_at': '', 'volume_id': '85b42ea8-5797-4cd9-af68-83c72279209a', 'serial': '85b42ea8-5797-4cd9-af68-83c72279209a'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2163.210478] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8fa43f-f311-4510-88d2-4d4ff77885ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.230133] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91d1bf9-b936-4484-a990-fec6603ac8b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.256378] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] volume-85b42ea8-5797-4cd9-af68-83c72279209a/volume-85b42ea8-5797-4cd9-af68-83c72279209a.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2163.256643] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-431611b2-cbc6-4c91-8b38-8936daba3104 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.275760] env[62619]: DEBUG oslo_vmware.api [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2163.275760] env[62619]: value = "task-1778807" [ 2163.275760] env[62619]: _type = "Task" [ 2163.275760] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.283623] env[62619]: DEBUG oslo_vmware.api [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778807, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.785746] env[62619]: DEBUG oslo_vmware.api [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778807, 'name': ReconfigVM_Task, 'duration_secs': 0.321561} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.786018] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Reconfigured VM instance instance-0000007b to attach disk [datastore1] volume-85b42ea8-5797-4cd9-af68-83c72279209a/volume-85b42ea8-5797-4cd9-af68-83c72279209a.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2163.790637] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7bbf8b78-7398-4726-a6ab-fe09ec85f345 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.804942] env[62619]: DEBUG oslo_vmware.api [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2163.804942] env[62619]: value = "task-1778808" [ 2163.804942] env[62619]: _type = "Task" [ 2163.804942] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.812538] env[62619]: DEBUG oslo_vmware.api [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778808, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.240171] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3482094-05f2-4aaa-aac9-bb7b7edbf469 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.258508] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance '1a2a66f3-1fe8-4b26-a3e2-8083815e0427' progress to 0 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2164.315548] env[62619]: DEBUG oslo_vmware.api [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778808, 'name': ReconfigVM_Task, 'duration_secs': 0.227111} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.315853] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369210', 'volume_id': '85b42ea8-5797-4cd9-af68-83c72279209a', 'name': 'volume-85b42ea8-5797-4cd9-af68-83c72279209a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '98b58e56-1249-42e7-a421-c836340cc4d4', 'attached_at': '', 'detached_at': '', 'volume_id': '85b42ea8-5797-4cd9-af68-83c72279209a', 'serial': '85b42ea8-5797-4cd9-af68-83c72279209a'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2164.764863] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2164.765182] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbbc3279-facc-4994-9b3f-1536d5c3f4b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.772998] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2164.772998] env[62619]: value = "task-1778809" [ 2164.772998] env[62619]: _type = "Task" [ 2164.772998] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.781906] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778809, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.283537] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778809, 'name': PowerOffVM_Task, 'duration_secs': 0.181537} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2165.285049] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2165.285049] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance '1a2a66f3-1fe8-4b26-a3e2-8083815e0427' progress to 17 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2165.350719] env[62619]: DEBUG nova.objects.instance [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lazy-loading 'flavor' on Instance uuid 98b58e56-1249-42e7-a421-c836340cc4d4 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2165.791311] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2165.791540] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2165.791708] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2165.791906] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2165.792061] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2165.792208] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2165.792411] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2165.792577] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2165.792764] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2165.792918] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2165.793100] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2165.797970] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02ead743-55d0-4af3-8e25-0821354af1db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.813517] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2165.813517] env[62619]: value = "task-1778810" [ 2165.813517] env[62619]: _type = "Task" [ 2165.813517] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2165.821257] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778810, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.855812] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de8c1fd9-3cb0-44c6-ab4d-f246c9b3434f tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "98b58e56-1249-42e7-a421-c836340cc4d4" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.242s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2166.055882] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "98b58e56-1249-42e7-a421-c836340cc4d4" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2166.056142] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "98b58e56-1249-42e7-a421-c836340cc4d4" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2166.323286] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778810, 'name': ReconfigVM_Task, 'duration_secs': 0.172305} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2166.323665] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance '1a2a66f3-1fe8-4b26-a3e2-8083815e0427' progress to 33 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2166.559681] env[62619]: INFO nova.compute.manager [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Detaching volume 85b42ea8-5797-4cd9-af68-83c72279209a [ 2166.589145] env[62619]: INFO nova.virt.block_device [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Attempting to driver detach volume 85b42ea8-5797-4cd9-af68-83c72279209a from mountpoint /dev/sdb [ 2166.589383] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Volume detach. 
Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2166.589582] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369210', 'volume_id': '85b42ea8-5797-4cd9-af68-83c72279209a', 'name': 'volume-85b42ea8-5797-4cd9-af68-83c72279209a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '98b58e56-1249-42e7-a421-c836340cc4d4', 'attached_at': '', 'detached_at': '', 'volume_id': '85b42ea8-5797-4cd9-af68-83c72279209a', 'serial': '85b42ea8-5797-4cd9-af68-83c72279209a'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2166.590460] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece0358c-3d11-491b-9487-8659bd463523 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.611696] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b48de6b-e9bc-4d0c-a46c-713049670cdf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.618413] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb9e9d7-beb6-45ac-a617-98b01fb6483a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.638083] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c226c4d8-92fe-4311-86d8-b9a43615f5f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.653338] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] The volume has not been displaced from its original location: [datastore1] volume-85b42ea8-5797-4cd9-af68-83c72279209a/volume-85b42ea8-5797-4cd9-af68-83c72279209a.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2166.658369] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Reconfiguring VM instance instance-0000007b to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2166.658623] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed7315bf-4b30-42ef-bd65-1f5a8a1ce5da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.676287] env[62619]: DEBUG oslo_vmware.api [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2166.676287] env[62619]: value = "task-1778811" [ 2166.676287] env[62619]: _type = "Task" [ 2166.676287] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.683459] env[62619]: DEBUG oslo_vmware.api [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778811, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.830520] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2166.830790] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2166.830864] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2166.831055] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2166.831205] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 
tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2166.831353] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2166.831593] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2166.831777] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2166.831961] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2166.832143] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2166.832319] env[62619]: DEBUG nova.virt.hardware [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2166.837985] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2166.838277] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfb6edf4-f891-4449-a950-ec50e5ccb9d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.856941] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2166.856941] env[62619]: value = "task-1778812" [ 2166.856941] env[62619]: _type = "Task" [ 2166.856941] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.864565] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778812, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.186176] env[62619]: DEBUG oslo_vmware.api [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778811, 'name': ReconfigVM_Task, 'duration_secs': 0.208884} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.186421] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Reconfigured VM instance instance-0000007b to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2167.190923] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6284c50-cbf1-4475-bf93-9180adea6845 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.205364] env[62619]: DEBUG oslo_vmware.api [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2167.205364] env[62619]: value = "task-1778813" [ 2167.205364] env[62619]: _type = "Task" [ 2167.205364] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.212692] env[62619]: DEBUG oslo_vmware.api [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778813, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.366821] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778812, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.716058] env[62619]: DEBUG oslo_vmware.api [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778813, 'name': ReconfigVM_Task, 'duration_secs': 0.13172} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.716264] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369210', 'volume_id': '85b42ea8-5797-4cd9-af68-83c72279209a', 'name': 'volume-85b42ea8-5797-4cd9-af68-83c72279209a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '98b58e56-1249-42e7-a421-c836340cc4d4', 'attached_at': '', 'detached_at': '', 'volume_id': '85b42ea8-5797-4cd9-af68-83c72279209a', 'serial': '85b42ea8-5797-4cd9-af68-83c72279209a'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2167.867948] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778812, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.256948] env[62619]: DEBUG nova.objects.instance [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lazy-loading 'flavor' on Instance uuid 98b58e56-1249-42e7-a421-c836340cc4d4 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2168.368813] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778812, 'name': ReconfigVM_Task, 'duration_secs': 1.161966} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2168.369243] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2168.369895] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bad02ec-f92a-4818-a863-5931a99937de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.391731] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 1a2a66f3-1fe8-4b26-a3e2-8083815e0427/1a2a66f3-1fe8-4b26-a3e2-8083815e0427.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2168.392088] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56dcf1cb-f4d5-47cf-a557-44b141bc2fe3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.411058] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2168.411058] env[62619]: value = "task-1778814" [ 2168.411058] env[62619]: _type = "Task" [ 2168.411058] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2168.419190] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778814, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.921529] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778814, 'name': ReconfigVM_Task, 'duration_secs': 0.287973} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2168.921835] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 1a2a66f3-1fe8-4b26-a3e2-8083815e0427/1a2a66f3-1fe8-4b26-a3e2-8083815e0427.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2168.922101] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance '1a2a66f3-1fe8-4b26-a3e2-8083815e0427' progress to 50 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2169.265569] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3abba424-1d51-476d-9db5-eddbe4a7a648 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "98b58e56-1249-42e7-a421-c836340cc4d4" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.209s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.428651] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8272152-84ba-42a9-a5aa-31cc9e361e49 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.448494] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d094f17a-8ae3-4338-8b73-722721959e05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.466113] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance '1a2a66f3-1fe8-4b26-a3e2-8083815e0427' progress to 67 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2170.004484] env[62619]: DEBUG nova.network.neutron [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Port f8d4076b-f0ea-4483-a42a-79e288c9c6ca binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2170.288946] env[62619]: DEBUG oslo_concurrency.lockutils [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "98b58e56-1249-42e7-a421-c836340cc4d4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2170.289229] env[62619]: DEBUG oslo_concurrency.lockutils [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock 
"98b58e56-1249-42e7-a421-c836340cc4d4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2170.289436] env[62619]: DEBUG oslo_concurrency.lockutils [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "98b58e56-1249-42e7-a421-c836340cc4d4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2170.289649] env[62619]: DEBUG oslo_concurrency.lockutils [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "98b58e56-1249-42e7-a421-c836340cc4d4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2170.289824] env[62619]: DEBUG oslo_concurrency.lockutils [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "98b58e56-1249-42e7-a421-c836340cc4d4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2170.291888] env[62619]: INFO nova.compute.manager [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Terminating instance [ 2170.795893] env[62619]: DEBUG nova.compute.manager [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2170.796282] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2170.797067] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d2fbc2-9f2c-4ddc-a04d-62fdeccabe4f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.805888] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2170.806088] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68516d76-aac8-4320-85e3-24efd6cac483 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.811590] env[62619]: DEBUG oslo_vmware.api [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2170.811590] env[62619]: value = "task-1778815" [ 2170.811590] env[62619]: _type = "Task" [ 2170.811590] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.820149] env[62619]: DEBUG oslo_vmware.api [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778815, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.025550] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "1a2a66f3-1fe8-4b26-a3e2-8083815e0427-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2171.025771] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "1a2a66f3-1fe8-4b26-a3e2-8083815e0427-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2171.025939] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "1a2a66f3-1fe8-4b26-a3e2-8083815e0427-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2171.321936] env[62619]: DEBUG oslo_vmware.api [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778815, 'name': PowerOffVM_Task, 'duration_secs': 0.1807} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2171.322213] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2171.322380] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2171.322628] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-712b231e-51ad-44b2-bff8-7478e5e43d8c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.417554] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2171.417738] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2171.417891] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Deleting the datastore file [datastore1] 98b58e56-1249-42e7-a421-c836340cc4d4 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2171.418147] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e370600-dd5e-4116-986b-62f583d61077 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.423847] env[62619]: DEBUG oslo_vmware.api [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2171.423847] env[62619]: value = "task-1778817" [ 2171.423847] env[62619]: _type = "Task" [ 2171.423847] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.430892] env[62619]: DEBUG oslo_vmware.api [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778817, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.934176] env[62619]: DEBUG oslo_vmware.api [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778817, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135183} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2171.934589] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2171.934681] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2171.934827] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2171.935015] env[62619]: INFO nova.compute.manager [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Took 1.14 seconds to destroy the instance on the hypervisor. [ 2171.935278] env[62619]: DEBUG oslo.service.loopingcall [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2171.935459] env[62619]: DEBUG nova.compute.manager [-] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2171.935553] env[62619]: DEBUG nova.network.neutron [-] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2172.118283] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2172.118283] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2172.118283] env[62619]: DEBUG nova.network.neutron [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2172.555018] env[62619]: DEBUG nova.compute.manager [req-9954ed0b-6a02-4a91-8ef0-1412b37999d2 req-e294a1b6-e30a-4448-b8e6-7fc740b7d18e service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Received event network-vif-deleted-862f8881-6c2c-413b-b5f6-eb389ced9b21 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2172.555018] env[62619]: INFO nova.compute.manager [req-9954ed0b-6a02-4a91-8ef0-1412b37999d2 req-e294a1b6-e30a-4448-b8e6-7fc740b7d18e service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Neutron deleted interface 862f8881-6c2c-413b-b5f6-eb389ced9b21; detaching it from the instance and deleting it from the info cache [ 2172.555018] env[62619]: DEBUG nova.network.neutron [req-9954ed0b-6a02-4a91-8ef0-1412b37999d2 req-e294a1b6-e30a-4448-b8e6-7fc740b7d18e service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2172.802090] env[62619]: DEBUG nova.network.neutron [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance_info_cache with network_info: [{"id": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "address": "fa:16:3e:14:50:cc", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8d4076b-f0", "ovs_interfaceid": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2173.035296] env[62619]: DEBUG nova.network.neutron [-] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2173.057544] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ba0b4271-0673-435d-8678-29f08566df13 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.068172] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5d7a47-59f2-4a28-aa82-772c5d85f922 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.092890] env[62619]: DEBUG nova.compute.manager [req-9954ed0b-6a02-4a91-8ef0-1412b37999d2 req-e294a1b6-e30a-4448-b8e6-7fc740b7d18e service nova] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Detach interface failed, port_id=862f8881-6c2c-413b-b5f6-eb389ced9b21, reason: Instance 98b58e56-1249-42e7-a421-c836340cc4d4 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2173.305361] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2173.537903] env[62619]: INFO nova.compute.manager [-] [instance: 98b58e56-1249-42e7-a421-c836340cc4d4] Took 1.60 seconds to deallocate network for instance. 
[ 2173.829776] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9680bfc-3c2f-4a6e-b7ff-7c087bf094ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.848192] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a309fe22-1b1d-4e90-b9fc-843d25d43222 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.855174] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance '1a2a66f3-1fe8-4b26-a3e2-8083815e0427' progress to 83 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2174.044066] env[62619]: DEBUG oslo_concurrency.lockutils [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2174.044434] env[62619]: DEBUG oslo_concurrency.lockutils [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2174.044573] env[62619]: DEBUG nova.objects.instance [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lazy-loading 'resources' on Instance uuid 98b58e56-1249-42e7-a421-c836340cc4d4 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2174.360949] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2174.361272] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8f1238d-4856-4038-92bc-cc1f1806f205 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.368895] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2174.368895] env[62619]: value = "task-1778818" [ 2174.368895] env[62619]: _type = "Task" [ 2174.368895] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.376375] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778818, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.595268] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa99d4b-75da-4f5b-b82a-b8a915345a11 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.602716] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22149ce-00af-4e8d-9d94-213145a8cd49 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.634265] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c7551d-34fd-41b7-9dca-0d178f692028 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.641877] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fe7934-da0b-477f-912a-72fb0076b966 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.655152] env[62619]: DEBUG nova.compute.provider_tree [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2174.878789] env[62619]: DEBUG oslo_vmware.api [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778818, 'name': PowerOnVM_Task, 'duration_secs': 0.374622} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.879043] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2174.879185] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c66ad0-8636-4946-b674-4baa14b0dad8 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance '1a2a66f3-1fe8-4b26-a3e2-8083815e0427' progress to 100 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2175.158847] env[62619]: DEBUG nova.scheduler.client.report [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2175.663602] env[62619]: DEBUG oslo_concurrency.lockutils [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.619s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2175.681378] env[62619]: INFO nova.scheduler.client.report [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Deleted allocations for instance 98b58e56-1249-42e7-a421-c836340cc4d4 [ 2176.189313] env[62619]: DEBUG oslo_concurrency.lockutils [None req-12d7d597-cbe9-4c1d-9e0c-0aa486dadb23 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "98b58e56-1249-42e7-a421-c836340cc4d4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.900s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2176.817098] env[62619]: DEBUG nova.network.neutron [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Port f8d4076b-f0ea-4483-a42a-79e288c9c6ca binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2176.817385] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2176.817538] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2176.817716] env[62619]: DEBUG nova.network.neutron [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2177.533581] env[62619]: DEBUG nova.network.neutron [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance_info_cache with network_info: [{"id": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "address": "fa:16:3e:14:50:cc", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8d4076b-f0", "ovs_interfaceid": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2178.319088] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2178.322903] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2178.323119] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock 
"e165e8ff-4f48-4047-bb29-d77d4e0b40aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2178.824283] env[62619]: DEBUG nova.compute.manager [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62619) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 2178.824617] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2178.824732] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2178.826129] env[62619]: DEBUG nova.compute.manager [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2179.330652] env[62619]: DEBUG nova.objects.instance [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lazy-loading 'migration_context' on Instance uuid 1a2a66f3-1fe8-4b26-a3e2-8083815e0427 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2179.348405] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.884430] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0669a7bd-0e3d-468c-a0d1-57c14ab89afc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.892186] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62421f4-464f-4092-b34c-723749a53dd3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.921761] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5d00f2-3001-4a57-b7f5-167046618f8e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.928642] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd38f9aa-1faa-45dc-a7f8-2baf80d92f63 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.940972] env[62619]: DEBUG nova.compute.provider_tree [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2180.444722] env[62619]: DEBUG nova.scheduler.client.report [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2180.709312] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2180.709486] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 2181.456811] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.632s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.462988] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.115s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.464478] env[62619]: INFO nova.compute.claims [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2182.525392] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe434754-57eb-42b6-bce0-aaa987a73912 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.533548] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212d5794-209d-4944-8301-76bba2270da3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.563354] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abdf77b-8b85-4b27-91f7-b640bbafd55c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.570367] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233288f1-978d-4164-8c90-acb86564c7c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.583265] env[62619]: DEBUG nova.compute.provider_tree [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2182.998329] env[62619]: INFO nova.compute.manager [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Swapping old allocation on dict_keys(['e814b747-ed75-487b-a97d-acf66bc6db0b']) held by migration bb68b72e-100c-4998-9191-cc50a3f3247f for instance [ 2183.020101] env[62619]: DEBUG nova.scheduler.client.report [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Overwriting current allocation {'allocations': {'e814b747-ed75-487b-a97d-acf66bc6db0b': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 187}}, 'project_id': 
'a2c7470712c14fa9bc1804ae2431107b', 'user_id': '53deb71781e14136bff2b0b6c6a82890', 'consumer_generation': 1} on consumer 1a2a66f3-1fe8-4b26-a3e2-8083815e0427 {{(pid=62619) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 2183.085872] env[62619]: DEBUG nova.scheduler.client.report [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2183.091970] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2183.092149] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2183.092318] env[62619]: DEBUG nova.network.neutron [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2183.590815] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.128s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2183.591365] env[62619]: DEBUG nova.compute.manager [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Start building networks asynchronously for instance. 
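Aside on the inventory payload repeated above: it maps directly onto Placement's capacity model. A short worked sketch (values copied from the log; the capacity formula (total - reserved) * allocation_ratio is the standard Placement calculation, shown here as an illustration rather than quoted from this deployment's code):

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        usable = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, usable)   # VCPU 192, MEMORY_MB 196078, DISK_GB 400
    # min_unit, max_unit and step_size (also present in the logged payload)
    # constrain how much of a resource any single allocation may request.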
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2183.792107] env[62619]: DEBUG nova.network.neutron [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance_info_cache with network_info: [{"id": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "address": "fa:16:3e:14:50:cc", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8d4076b-f0", "ovs_interfaceid": "f8d4076b-f0ea-4483-a42a-79e288c9c6ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2184.098934] env[62619]: DEBUG nova.compute.utils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2184.100472] env[62619]: DEBUG nova.compute.manager [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2184.100650] env[62619]: DEBUG nova.network.neutron [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2184.139264] env[62619]: DEBUG nova.policy [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a13407c9bfc448bb27a06680d41afb2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72ae04936c9b4ea19b5d7fac78c96ba4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 2184.295243] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "refresh_cache-1a2a66f3-1fe8-4b26-a3e2-8083815e0427" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2184.295700] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2184.296244] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8a5ac63-50df-4f9b-a1b7-27771aae78d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.304467] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2184.304467] env[62619]: value = "task-1778819" [ 2184.304467] env[62619]: _type = "Task" [ 2184.304467] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.312218] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778819, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.411975] env[62619]: DEBUG nova.network.neutron [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Successfully created port: 13b4d2ff-6fc8-4ea9-9853-4fa6022969a8 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2184.605011] env[62619]: DEBUG nova.compute.manager [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2184.813890] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778819, 'name': PowerOffVM_Task, 'duration_secs': 0.220754} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.814112] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2184.814752] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2184.814965] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2184.815189] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2184.815417] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2184.815568] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de 
tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2184.815716] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2184.815916] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2184.816085] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2184.816249] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2184.816404] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2184.816570] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2184.821378] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4da0deb-4fb2-4c05-8bf6-5f43555ad4f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.836535] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2184.836535] env[62619]: value = "task-1778820" [ 2184.836535] env[62619]: _type = "Task" [ 2184.836535] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.844025] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778820, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.347223] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778820, 'name': ReconfigVM_Task, 'duration_secs': 0.142469} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2185.347975] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801b3aa7-7dab-4869-b400-021dc43f2e67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.366055] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2185.366292] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2185.366447] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2185.366625] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2185.366771] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2185.366920] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2185.367132] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2185.367293] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2185.367456] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2185.367615] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2185.367786] env[62619]: DEBUG nova.virt.hardware [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2185.368561] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f4ad16c-a708-4ba1-887a-9d455100c2ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.373790] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2185.373790] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d84fc0-7e04-f52a-0213-371c52b5e416" [ 2185.373790] env[62619]: _type = "Task" [ 2185.373790] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2185.381222] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d84fc0-7e04-f52a-0213-371c52b5e416, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.615029] env[62619]: DEBUG nova.compute.manager [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Start spawning the instance on the hypervisor. 
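Aside on the topology negotiation above: it always converges on VirtCPUTopology(cores=1,sockets=1,threads=1) because the m1.nano flavor has a single vCPU and neither the flavor nor the image sets any topology limits (the logged limits are all 0:0:0). A toy illustration of why only one combination survives (this enumerates factorizations only; Nova's real code additionally applies flavor and image limits and preference ordering):

    vcpus = 1   # m1.nano in the log
    topologies = [(sockets, cores, threads)
                  for sockets in range(1, vcpus + 1)
                  for cores in range(1, vcpus + 1)
                  for threads in range(1, vcpus + 1)
                  if sockets * cores * threads == vcpus]
    print(topologies)   # [(1, 1, 1)]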
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2185.642073] env[62619]: DEBUG nova.virt.hardware [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2185.642375] env[62619]: DEBUG nova.virt.hardware [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2185.642554] env[62619]: DEBUG nova.virt.hardware [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2185.642761] env[62619]: DEBUG nova.virt.hardware [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2185.643058] env[62619]: DEBUG nova.virt.hardware [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2185.643131] env[62619]: DEBUG nova.virt.hardware [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2185.643289] env[62619]: DEBUG nova.virt.hardware [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2185.643452] env[62619]: DEBUG nova.virt.hardware [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2185.643610] 
env[62619]: DEBUG nova.virt.hardware [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2185.643769] env[62619]: DEBUG nova.virt.hardware [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2185.643939] env[62619]: DEBUG nova.virt.hardware [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2185.644804] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8107a2-0e9f-40b8-95bc-d9c0a437419b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.652740] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6000980-044d-4f8d-b9c4-f0d04c6b2f8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.709170] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2185.802650] env[62619]: DEBUG nova.compute.manager [req-c27c2740-9312-4692-956d-edb37041d3cf req-ef8943b5-b5dd-4806-979a-9159c90500af service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Received event network-vif-plugged-13b4d2ff-6fc8-4ea9-9853-4fa6022969a8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2185.802810] env[62619]: DEBUG oslo_concurrency.lockutils [req-c27c2740-9312-4692-956d-edb37041d3cf req-ef8943b5-b5dd-4806-979a-9159c90500af service nova] Acquiring lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2185.802941] env[62619]: DEBUG oslo_concurrency.lockutils [req-c27c2740-9312-4692-956d-edb37041d3cf req-ef8943b5-b5dd-4806-979a-9159c90500af service nova] Lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2185.803131] env[62619]: DEBUG oslo_concurrency.lockutils [req-c27c2740-9312-4692-956d-edb37041d3cf req-ef8943b5-b5dd-4806-979a-9159c90500af service nova] Lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2185.803303] env[62619]: DEBUG nova.compute.manager [req-c27c2740-9312-4692-956d-edb37041d3cf 
req-ef8943b5-b5dd-4806-979a-9159c90500af service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] No waiting events found dispatching network-vif-plugged-13b4d2ff-6fc8-4ea9-9853-4fa6022969a8 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2185.803480] env[62619]: WARNING nova.compute.manager [req-c27c2740-9312-4692-956d-edb37041d3cf req-ef8943b5-b5dd-4806-979a-9159c90500af service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Received unexpected event network-vif-plugged-13b4d2ff-6fc8-4ea9-9853-4fa6022969a8 for instance with vm_state building and task_state spawning. [ 2185.884758] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52d84fc0-7e04-f52a-0213-371c52b5e416, 'name': SearchDatastore_Task, 'duration_secs': 0.008374} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2185.885681] env[62619]: DEBUG nova.network.neutron [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Successfully updated port: 13b4d2ff-6fc8-4ea9-9853-4fa6022969a8 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2185.893885] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2185.894235] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14b45ca8-d806-4a1b-8aba-af8a649e005b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.914773] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2185.914773] env[62619]: value = "task-1778821" [ 2185.914773] env[62619]: _type = "Task" [ 2185.914773] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2185.923276] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778821, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.388908] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "refresh_cache-e165e8ff-4f48-4047-bb29-d77d4e0b40aa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2186.389119] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired lock "refresh_cache-e165e8ff-4f48-4047-bb29-d77d4e0b40aa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2186.389194] env[62619]: DEBUG nova.network.neutron [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2186.425793] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778821, 'name': ReconfigVM_Task, 'duration_secs': 0.17952} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2186.425793] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2186.425793] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3fb3e5-e921-4337-a65f-cea743967b9e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.447047] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 1a2a66f3-1fe8-4b26-a3e2-8083815e0427/1a2a66f3-1fe8-4b26-a3e2-8083815e0427.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2186.447347] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-887e7e7f-4fdf-4860-9f3a-1b697a14e832 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.465971] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2186.465971] env[62619]: value = "task-1778822" [ 2186.465971] env[62619]: _type = "Task" [ 2186.465971] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.473454] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778822, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.709127] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2186.709461] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 2186.920838] env[62619]: DEBUG nova.network.neutron [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2186.975692] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778822, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.051221] env[62619]: DEBUG nova.network.neutron [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Updating instance_info_cache with network_info: [{"id": "13b4d2ff-6fc8-4ea9-9853-4fa6022969a8", "address": "fa:16:3e:22:78:f0", "network": {"id": "0fdc1539-eb35-4283-99e8-fcc5a7a64110", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2082542751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ae04936c9b4ea19b5d7fac78c96ba4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13b4d2ff-6f", "ovs_interfaceid": "13b4d2ff-6fc8-4ea9-9853-4fa6022969a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2187.212419] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Didn't find any instances for network info cache update. 
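Aside on the "Running periodic task ComputeManager._..." lines above: they are driven by oslo.service's periodic task machinery. A minimal sketch of that mechanism (illustrative only; the class name, task name and 60 second spacing are invented for the example and are not Nova's values):

    from oslo_config import cfg
    from oslo_service import periodic_task

    class ExampleManager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)
        def _poll_something(self, context):
            # Invoked roughly every 60 seconds once the hosting service
            # drives run_periodic_tasks() from its timer loop.
            pass

    mgr = ExampleManager()
    mgr.run_periodic_tasks(context=None)   # one tick of the dispatcher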
{{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 2187.212659] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2187.212880] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2187.476789] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778822, 'name': ReconfigVM_Task, 'duration_secs': 0.807689} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.477085] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 1a2a66f3-1fe8-4b26-a3e2-8083815e0427/1a2a66f3-1fe8-4b26-a3e2-8083815e0427.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2187.477920] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3956bb-9d4b-410b-adb9-9cfe6309a4cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.495644] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1daafbb9-f679-4b1f-9c09-671d77027118 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.513134] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7296237e-bc16-4944-b217-bfd7f9828bf3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.531670] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696fb47a-0145-418f-87c3-a50e3f58804f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.538422] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2187.538647] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5b9f3d7-4380-4a1f-ba99-38661ab8b57e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.543888] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2187.543888] 
env[62619]: value = "task-1778823" [ 2187.543888] env[62619]: _type = "Task" [ 2187.543888] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.550746] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778823, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.554257] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Releasing lock "refresh_cache-e165e8ff-4f48-4047-bb29-d77d4e0b40aa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2187.554534] env[62619]: DEBUG nova.compute.manager [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Instance network_info: |[{"id": "13b4d2ff-6fc8-4ea9-9853-4fa6022969a8", "address": "fa:16:3e:22:78:f0", "network": {"id": "0fdc1539-eb35-4283-99e8-fcc5a7a64110", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2082542751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ae04936c9b4ea19b5d7fac78c96ba4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13b4d2ff-6f", "ovs_interfaceid": "13b4d2ff-6fc8-4ea9-9853-4fa6022969a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2187.554952] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:78:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13b4d2ff-6fc8-4ea9-9853-4fa6022969a8', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2187.562205] env[62619]: DEBUG oslo.service.loopingcall [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2187.562397] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2187.562596] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c601661-8e54-4deb-8b0f-11a2a9794158 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.581084] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2187.581084] env[62619]: value = "task-1778824" [ 2187.581084] env[62619]: _type = "Task" [ 2187.581084] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.588078] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778824, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.709022] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2187.830548] env[62619]: DEBUG nova.compute.manager [req-895c128d-66f5-4349-ac0d-97b9d85df7ed req-f6aa5908-0ddf-4129-85da-abb64b1ac9cf service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Received event network-changed-13b4d2ff-6fc8-4ea9-9853-4fa6022969a8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2187.831092] env[62619]: DEBUG nova.compute.manager [req-895c128d-66f5-4349-ac0d-97b9d85df7ed req-f6aa5908-0ddf-4129-85da-abb64b1ac9cf service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Refreshing instance network info cache due to event network-changed-13b4d2ff-6fc8-4ea9-9853-4fa6022969a8. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2187.831092] env[62619]: DEBUG oslo_concurrency.lockutils [req-895c128d-66f5-4349-ac0d-97b9d85df7ed req-f6aa5908-0ddf-4129-85da-abb64b1ac9cf service nova] Acquiring lock "refresh_cache-e165e8ff-4f48-4047-bb29-d77d4e0b40aa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2187.831279] env[62619]: DEBUG oslo_concurrency.lockutils [req-895c128d-66f5-4349-ac0d-97b9d85df7ed req-f6aa5908-0ddf-4129-85da-abb64b1ac9cf service nova] Acquired lock "refresh_cache-e165e8ff-4f48-4047-bb29-d77d4e0b40aa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2187.831461] env[62619]: DEBUG nova.network.neutron [req-895c128d-66f5-4349-ac0d-97b9d85df7ed req-f6aa5908-0ddf-4129-85da-abb64b1ac9cf service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Refreshing network info cache for port 13b4d2ff-6fc8-4ea9-9853-4fa6022969a8 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2188.053331] env[62619]: DEBUG oslo_vmware.api [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778823, 'name': PowerOnVM_Task, 'duration_secs': 0.352015} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.053585] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2188.089444] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778824, 'name': CreateVM_Task, 'duration_secs': 0.29594} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.089640] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2188.090228] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2188.090392] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2188.090715] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2188.090953] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9420bf55-2ae6-4c3b-b825-a275f8848012 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.096048] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2188.096048] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521ebfc9-54e8-7ae1-4272-88a0f1e37fd1" [ 2188.096048] env[62619]: _type = "Task" [ 2188.096048] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.102965] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521ebfc9-54e8-7ae1-4272-88a0f1e37fd1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.524436] env[62619]: DEBUG nova.network.neutron [req-895c128d-66f5-4349-ac0d-97b9d85df7ed req-f6aa5908-0ddf-4129-85da-abb64b1ac9cf service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Updated VIF entry in instance network info cache for port 13b4d2ff-6fc8-4ea9-9853-4fa6022969a8. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2188.524807] env[62619]: DEBUG nova.network.neutron [req-895c128d-66f5-4349-ac0d-97b9d85df7ed req-f6aa5908-0ddf-4129-85da-abb64b1ac9cf service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Updating instance_info_cache with network_info: [{"id": "13b4d2ff-6fc8-4ea9-9853-4fa6022969a8", "address": "fa:16:3e:22:78:f0", "network": {"id": "0fdc1539-eb35-4283-99e8-fcc5a7a64110", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2082542751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ae04936c9b4ea19b5d7fac78c96ba4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13b4d2ff-6f", "ovs_interfaceid": "13b4d2ff-6fc8-4ea9-9853-4fa6022969a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2188.607041] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]521ebfc9-54e8-7ae1-4272-88a0f1e37fd1, 'name': SearchDatastore_Task, 'duration_secs': 0.008941} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.607360] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2188.607588] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2188.607812] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2188.607959] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2188.608157] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2188.608409] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce491f51-1f4e-436f-ae03-561cc63e421c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.616739] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2188.616899] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2188.617597] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efc66482-a995-4cb4-9b01-85b78ceb5bda {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.622540] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2188.622540] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ee0030-dafa-f13f-ff1c-caad0243433e" [ 2188.622540] env[62619]: _type = "Task" [ 2188.622540] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.629370] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ee0030-dafa-f13f-ff1c-caad0243433e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.027891] env[62619]: DEBUG oslo_concurrency.lockutils [req-895c128d-66f5-4349-ac0d-97b9d85df7ed req-f6aa5908-0ddf-4129-85da-abb64b1ac9cf service nova] Releasing lock "refresh_cache-e165e8ff-4f48-4047-bb29-d77d4e0b40aa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2189.063757] env[62619]: INFO nova.compute.manager [None req-4a8b1a5c-021a-476f-afb1-7dd3baed64de tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance to original state: 'active' [ 2189.131997] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52ee0030-dafa-f13f-ff1c-caad0243433e, 'name': SearchDatastore_Task, 'duration_secs': 0.008267} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2189.132765] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df9f2ab1-774d-4a96-8da3-f0dcc3d46aef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.137458] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2189.137458] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528ecbce-af60-cf35-f3cd-9597996c46bf" [ 2189.137458] env[62619]: _type = "Task" [ 2189.137458] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2189.144648] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528ecbce-af60-cf35-f3cd-9597996c46bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.648290] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]528ecbce-af60-cf35-f3cd-9597996c46bf, 'name': SearchDatastore_Task, 'duration_secs': 0.009041} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2189.648580] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2189.648787] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e165e8ff-4f48-4047-bb29-d77d4e0b40aa/e165e8ff-4f48-4047-bb29-d77d4e0b40aa.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2189.649147] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfa4f496-cca7-435c-ac0d-3496343f2a24 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.655086] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2189.655086] env[62619]: value = "task-1778825" [ 2189.655086] env[62619]: _type = "Task" [ 2189.655086] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2189.662126] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778825, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.709148] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.709356] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2190.164690] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778825, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508073} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.165103] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] e165e8ff-4f48-4047-bb29-d77d4e0b40aa/e165e8ff-4f48-4047-bb29-d77d4e0b40aa.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2190.165191] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2190.165428] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d35539c-58cc-421d-bb7e-04ba26f0c68c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.171501] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2190.171501] env[62619]: value = "task-1778826" [ 2190.171501] env[62619]: _type = "Task" [ 2190.171501] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.179138] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778826, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.212356] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2190.212593] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2190.212768] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2190.212919] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2190.213789] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d39abb-0b06-4200-b124-e8fc24eefe52 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.221415] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9f0236-0299-4de2-a4ef-50b700c956d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.236266] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2620280d-e46a-4ac9-ba26-68118120c7ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.242543] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca858119-0785-45d2-846b-66d4de80c803 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.272062] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181318MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2190.272212] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2190.272394] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2190.507596] env[62619]: DEBUG oslo_concurrency.lockutils [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "1a2a66f3-1fe8-4b26-a3e2-8083815e0427" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2190.507726] env[62619]: DEBUG oslo_concurrency.lockutils [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "1a2a66f3-1fe8-4b26-a3e2-8083815e0427" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2190.507957] env[62619]: DEBUG oslo_concurrency.lockutils [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "1a2a66f3-1fe8-4b26-a3e2-8083815e0427-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2190.508117] env[62619]: DEBUG oslo_concurrency.lockutils [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "1a2a66f3-1fe8-4b26-a3e2-8083815e0427-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2190.508329] env[62619]: DEBUG oslo_concurrency.lockutils [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "1a2a66f3-1fe8-4b26-a3e2-8083815e0427-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2190.510571] env[62619]: INFO nova.compute.manager [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Terminating instance [ 2190.681110] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778826, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062159} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.681383] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2190.682171] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ba95a9-5e32-4c4e-93b8-52838fe698ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.703857] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] e165e8ff-4f48-4047-bb29-d77d4e0b40aa/e165e8ff-4f48-4047-bb29-d77d4e0b40aa.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2190.704122] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-278bab00-1335-425d-8c58-f6a549ca0cdc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.724573] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2190.724573] env[62619]: value = "task-1778827" [ 2190.724573] env[62619]: _type = "Task" [ 2190.724573] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.732127] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778827, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.014875] env[62619]: DEBUG nova.compute.manager [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2191.015259] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2191.016256] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf7c118-f618-4bfa-99ea-a11d972859ed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.024909] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2191.025219] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9fae3148-c816-484b-936f-14cff2a5f8ce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.031174] env[62619]: DEBUG oslo_vmware.api [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2191.031174] env[62619]: value = "task-1778828" [ 2191.031174] env[62619]: _type = "Task" [ 2191.031174] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.039488] env[62619]: DEBUG oslo_vmware.api [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778828, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.235078] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778827, 'name': ReconfigVM_Task, 'duration_secs': 0.460867} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.235468] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Reconfigured VM instance instance-0000007c to attach disk [datastore1] e165e8ff-4f48-4047-bb29-d77d4e0b40aa/e165e8ff-4f48-4047-bb29-d77d4e0b40aa.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2191.236039] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-848dc1dd-a673-4f59-a43a-1562dc94748d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.242846] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2191.242846] env[62619]: value = "task-1778829" [ 2191.242846] env[62619]: _type = "Task" [ 2191.242846] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.252442] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778829, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.300805] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance e165e8ff-4f48-4047-bb29-d77d4e0b40aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2191.300961] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 1a2a66f3-1fe8-4b26-a3e2-8083815e0427 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2191.301148] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2191.301284] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2191.337198] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d635cc7e-7229-46bf-b027-a3396af231e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.344461] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a907c5e5-7223-4279-9e91-55f7ac74054a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.373845] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab6c9a3-3e9b-4a5c-b0ce-cd90c0aad5cd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.381064] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e71840-7e9c-4257-b998-a7e60942d5d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.394061] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2191.540721] env[62619]: DEBUG oslo_vmware.api [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778828, 'name': PowerOffVM_Task, 'duration_secs': 0.238172} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.540967] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2191.541226] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2191.541557] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5525e3ae-76b7-4504-b725-d0444ec1a3d0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.629015] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2191.629264] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2191.629429] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Deleting the datastore file [datastore1] 1a2a66f3-1fe8-4b26-a3e2-8083815e0427 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2191.629685] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e52d0ddb-a209-45e0-9de2-e5cea9e7fba5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.636502] env[62619]: DEBUG oslo_vmware.api [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2191.636502] env[62619]: value = "task-1778831" [ 2191.636502] env[62619]: _type = "Task" [ 2191.636502] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.644045] env[62619]: DEBUG oslo_vmware.api [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778831, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.752474] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778829, 'name': Rename_Task, 'duration_secs': 0.143588} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.752726] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2191.752972] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e3cb2a32-fc3a-4885-a06c-87d1d1f4ceb3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.759544] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2191.759544] env[62619]: value = "task-1778832" [ 2191.759544] env[62619]: _type = "Task" [ 2191.759544] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.769315] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778832, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.897712] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2192.146643] env[62619]: DEBUG oslo_vmware.api [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778831, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13994} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2192.146908] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2192.147133] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2192.147329] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2192.147509] env[62619]: INFO nova.compute.manager [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2192.147747] env[62619]: DEBUG oslo.service.loopingcall [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2192.147940] env[62619]: DEBUG nova.compute.manager [-] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2192.148032] env[62619]: DEBUG nova.network.neutron [-] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2192.269279] env[62619]: DEBUG oslo_vmware.api [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778832, 'name': PowerOnVM_Task, 'duration_secs': 0.455196} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2192.269552] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2192.269732] env[62619]: INFO nova.compute.manager [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Took 6.65 seconds to spawn the instance on the hypervisor. 
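[editor's note] The spawn path traced above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) repeats one mechanism: each vSphere call returns a Task object, and the caller blocks in wait_for_task, which polls until the task reports success or failure — that is what the "Waiting for the task ... to complete", "progress is 0%" and "completed successfully" lines are. Below is a minimal sketch of that poll-until-done pattern built on oslo_service.loopingcall, the same primitive the "Waiting for function ... to return" (loopingcall.py:435) lines come from; fetch_task_state() is a hypothetical stand-in for the PropertyCollector read of Task.info, not the real oslo.vmware or pyVmomi API.

```python
# Sketch of the poll-until-done pattern visible in this log.
# fetch_task_state() is a placeholder, NOT the real vSphere binding.
from oslo_service import loopingcall


def fetch_task_state(task_ref):
    """Placeholder: pretend the task finishes on the first poll."""
    return {"state": "success", "progress": 100, "result": None}


def _poll_task(task_ref):
    info = fetch_task_state(task_ref)
    if info["state"] == "running":
        # Mirrors the "Task: {...} progress is N%." DEBUG lines.
        print("Task %s progress is %d%%." % (task_ref, info["progress"]))
        return  # keep polling
    if info["state"] == "success":
        # Raising LoopingCallDone stops the loop and hands the value back
        # to whoever called .wait() below.
        raise loopingcall.LoopingCallDone(retvalue=info["result"])
    raise RuntimeError("Task %s failed" % task_ref)


def wait_for_task(task_ref, poll_interval=0.5):
    timer = loopingcall.FixedIntervalLoopingCall(_poll_task, task_ref)
    return timer.start(interval=poll_interval).wait()


if __name__ == "__main__":
    print(wait_for_task("task-1778823"))
```

The fixed polling interval is why the log shows "progress is 0%" immediately after task creation and "completed successfully" with a measured duration_secs on the next poll.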
[ 2192.269915] env[62619]: DEBUG nova.compute.manager [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2192.270661] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6191ef-e813-4778-baba-2bbeed7fa291 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.402578] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2192.402750] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.130s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2192.597539] env[62619]: DEBUG nova.compute.manager [req-b2a4cfa3-457d-402f-8e93-10471990f7d8 req-47e77e05-7347-48e9-aedc-467b96701183 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Received event network-vif-deleted-f8d4076b-f0ea-4483-a42a-79e288c9c6ca {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2192.597922] env[62619]: INFO nova.compute.manager [req-b2a4cfa3-457d-402f-8e93-10471990f7d8 req-47e77e05-7347-48e9-aedc-467b96701183 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Neutron deleted interface f8d4076b-f0ea-4483-a42a-79e288c9c6ca; detaching it from the instance and deleting it from the info cache [ 2192.598044] env[62619]: DEBUG nova.network.neutron [req-b2a4cfa3-457d-402f-8e93-10471990f7d8 req-47e77e05-7347-48e9-aedc-467b96701183 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2192.793917] env[62619]: INFO nova.compute.manager [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Took 13.46 seconds to build instance. 
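[editor's note] The surrounding records — the resource tracker holding "compute_resources" for 2.130s, the terminate path taking the per-instance lock, and the event handlers taking "refresh_cache-<uuid>" — are all oslo_concurrency.lockutils at work; its inner wrapper emits the "Acquiring lock ... by ...", "acquired ... waited 0.000s" and "released ... held Ns" DEBUG lines seen throughout. A minimal sketch of the two usages those lines correspond to follows; the lock names mirror the ones in the log, and the function bodies are placeholders.

```python
# Sketch of the oslo_concurrency.lockutils usage behind the
# "Acquiring lock ... / acquired ... waited / released ... held" lines.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_available_resource():
    # While this runs, any other caller synchronized on the same lock name
    # blocks; lockutils logs the acquired/waited/held timings on entry/exit.
    pass


def refresh_instance_cache(instance_uuid):
    # Context-manager form, as used for the per-instance
    # "refresh_cache-<uuid>" locks in the log.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass


if __name__ == "__main__":
    update_available_resource()
    refresh_instance_cache('e165e8ff-4f48-4047-bb29-d77d4e0b40aa')
```

The "waited" figure measures time spent blocked behind other holders, while "held" measures the critical section itself, which is why long "held" values on "compute_resources" (as above) are the ones worth investigating.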
[ 2193.075125] env[62619]: DEBUG nova.network.neutron [-] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2193.101075] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-54563aa6-fb70-472b-8a5f-c385703e00fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.111055] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37165923-cc11-4790-a913-3de723c04cf9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.135426] env[62619]: DEBUG nova.compute.manager [req-b2a4cfa3-457d-402f-8e93-10471990f7d8 req-47e77e05-7347-48e9-aedc-467b96701183 service nova] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Detach interface failed, port_id=f8d4076b-f0ea-4483-a42a-79e288c9c6ca, reason: Instance 1a2a66f3-1fe8-4b26-a3e2-8083815e0427 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2193.295922] env[62619]: DEBUG oslo_concurrency.lockutils [None req-988ade0b-d2b6-41a1-95ad-57bac579abd6 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.973s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2193.532990] env[62619]: DEBUG nova.compute.manager [req-4a2e4fa1-2189-492d-ae49-c7ca0a69a5e7 req-8dd4186d-0924-40c3-b518-6f5406951c8b service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Received event network-changed-13b4d2ff-6fc8-4ea9-9853-4fa6022969a8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2193.533210] env[62619]: DEBUG nova.compute.manager [req-4a2e4fa1-2189-492d-ae49-c7ca0a69a5e7 req-8dd4186d-0924-40c3-b518-6f5406951c8b service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Refreshing instance network info cache due to event network-changed-13b4d2ff-6fc8-4ea9-9853-4fa6022969a8. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2193.533422] env[62619]: DEBUG oslo_concurrency.lockutils [req-4a2e4fa1-2189-492d-ae49-c7ca0a69a5e7 req-8dd4186d-0924-40c3-b518-6f5406951c8b service nova] Acquiring lock "refresh_cache-e165e8ff-4f48-4047-bb29-d77d4e0b40aa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2193.533562] env[62619]: DEBUG oslo_concurrency.lockutils [req-4a2e4fa1-2189-492d-ae49-c7ca0a69a5e7 req-8dd4186d-0924-40c3-b518-6f5406951c8b service nova] Acquired lock "refresh_cache-e165e8ff-4f48-4047-bb29-d77d4e0b40aa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2193.533719] env[62619]: DEBUG nova.network.neutron [req-4a2e4fa1-2189-492d-ae49-c7ca0a69a5e7 req-8dd4186d-0924-40c3-b518-6f5406951c8b service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Refreshing network info cache for port 13b4d2ff-6fc8-4ea9-9853-4fa6022969a8 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2193.578092] env[62619]: INFO nova.compute.manager [-] [instance: 1a2a66f3-1fe8-4b26-a3e2-8083815e0427] Took 1.43 seconds to deallocate network for instance. [ 2194.084150] env[62619]: DEBUG oslo_concurrency.lockutils [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2194.084319] env[62619]: DEBUG oslo_concurrency.lockutils [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2194.084540] env[62619]: DEBUG nova.objects.instance [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lazy-loading 'resources' on Instance uuid 1a2a66f3-1fe8-4b26-a3e2-8083815e0427 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2194.236424] env[62619]: DEBUG nova.network.neutron [req-4a2e4fa1-2189-492d-ae49-c7ca0a69a5e7 req-8dd4186d-0924-40c3-b518-6f5406951c8b service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Updated VIF entry in instance network info cache for port 13b4d2ff-6fc8-4ea9-9853-4fa6022969a8. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2194.236772] env[62619]: DEBUG nova.network.neutron [req-4a2e4fa1-2189-492d-ae49-c7ca0a69a5e7 req-8dd4186d-0924-40c3-b518-6f5406951c8b service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Updating instance_info_cache with network_info: [{"id": "13b4d2ff-6fc8-4ea9-9853-4fa6022969a8", "address": "fa:16:3e:22:78:f0", "network": {"id": "0fdc1539-eb35-4283-99e8-fcc5a7a64110", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2082542751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72ae04936c9b4ea19b5d7fac78c96ba4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13b4d2ff-6f", "ovs_interfaceid": "13b4d2ff-6fc8-4ea9-9853-4fa6022969a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2194.397803] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2194.629377] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b0f0d0-a376-44dc-bb42-9ea4f084f952 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.637178] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314e2606-4ddb-461d-8c11-269f4c0f4103 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.666338] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d808b7a-35f3-4bf1-9201-cb411610b7bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.673323] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c30efdd-7510-4962-95b6-7d1b1c7aff87 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.685713] env[62619]: DEBUG nova.compute.provider_tree [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2194.740291] env[62619]: DEBUG oslo_concurrency.lockutils 
[req-4a2e4fa1-2189-492d-ae49-c7ca0a69a5e7 req-8dd4186d-0924-40c3-b518-6f5406951c8b service nova] Releasing lock "refresh_cache-e165e8ff-4f48-4047-bb29-d77d4e0b40aa" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2195.188661] env[62619]: DEBUG nova.scheduler.client.report [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2195.693218] env[62619]: DEBUG oslo_concurrency.lockutils [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.609s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2195.710255] env[62619]: INFO nova.scheduler.client.report [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Deleted allocations for instance 1a2a66f3-1fe8-4b26-a3e2-8083815e0427 [ 2196.217471] env[62619]: DEBUG oslo_concurrency.lockutils [None req-193d47d4-158a-4d7b-940e-3a6aea1abb4a tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "1a2a66f3-1fe8-4b26-a3e2-8083815e0427" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.710s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2197.553232] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "69201bea-4f3a-45ba-b408-aec1cf28387a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2197.553582] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "69201bea-4f3a-45ba-b408-aec1cf28387a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2198.056168] env[62619]: DEBUG nova.compute.manager [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Starting instance... 
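
The inventory reported for provider e814b747-ed75-487b-a97d-acf66bc6db0b fixes how much of each resource class Placement will hand out: the usable amount is (total - reserved) * allocation_ratio, the standard Placement capacity calculation. A worked check with the exact numbers from the record above (the helper name is illustrative):

# Capacity implied by the inventory dict logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    # Placement admits allocations up to (total - reserved) * allocation_ratio.
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

for rc, inv in inventory.items():
    print(rc, effective_capacity(inv))
# VCPU 192.0       (48 cores oversubscribed 4x)
# MEMORY_MB 196078.0
# DISK_GB 400.0
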
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2198.576010] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2198.576318] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2198.577806] env[62619]: INFO nova.compute.claims [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2199.621052] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa15fbad-3507-4fa3-aa46-df523515c81b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.629092] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff3aaf2-24ac-497e-9c62-32adf4d66e2c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.657732] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e84e399-4f2a-4dc4-a3dc-eea43eeea19a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.664436] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01c4611-c575-4b79-af7e-dbdad5c7d216 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.678430] env[62619]: DEBUG nova.compute.provider_tree [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2200.181735] env[62619]: DEBUG nova.scheduler.client.report [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2200.686575] env[62619]: DEBUG oslo_concurrency.lockutils [None 
req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.110s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2200.687085] env[62619]: DEBUG nova.compute.manager [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2201.193048] env[62619]: DEBUG nova.compute.utils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2201.193948] env[62619]: DEBUG nova.compute.manager [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2201.194139] env[62619]: DEBUG nova.network.neutron [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2201.245880] env[62619]: DEBUG nova.policy [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53deb71781e14136bff2b0b6c6a82890', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2c7470712c14fa9bc1804ae2431107b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:192}} [ 2201.516616] env[62619]: DEBUG nova.network.neutron [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Successfully created port: 5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2201.697624] env[62619]: DEBUG nova.compute.manager [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Start building block device mappings for instance. 
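
"Using /dev/sd instead of None" (get_next_device_name) records the fall-back to the default /dev/sd prefix when the request did not name a device; the helper then hands out the first free letter, which is how the volume attach later in this log lands on /dev/sdb. A simplified sketch of that selection, single-letter suffixes only; Nova's real implementation in nova/compute/utils.py covers more cases:

import string

def next_device_name(used_names, prefix='/dev/sd'):
    """Return the first unused <prefix><letter> name (simplified)."""
    used = {name[len(prefix):] for name in used_names if name.startswith(prefix)}
    for letter in string.ascii_lowercase:
        if letter not in used:
            return prefix + letter
    raise ValueError('no free device names under %s' % prefix)

print(next_device_name(['/dev/sda']))               # /dev/sdb
print(next_device_name(['/dev/sda', '/dev/sdb']))   # /dev/sdc
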
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2202.708766] env[62619]: DEBUG nova.compute.manager [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2202.734365] env[62619]: DEBUG nova.virt.hardware [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T22:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T22:34:15Z,direct_url=,disk_format='vmdk',id=27a858d5-7985-4b17-8b01-50adcd8f566c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='6c0620dc847547f18c2b3b70b01d1230',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T22:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2202.734618] env[62619]: DEBUG nova.virt.hardware [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2202.734777] env[62619]: DEBUG nova.virt.hardware [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2202.734957] env[62619]: DEBUG nova.virt.hardware [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2202.735122] env[62619]: DEBUG nova.virt.hardware [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2202.735271] env[62619]: DEBUG nova.virt.hardware [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2202.735472] env[62619]: DEBUG nova.virt.hardware [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2202.735628] env[62619]: 
DEBUG nova.virt.hardware [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2202.735789] env[62619]: DEBUG nova.virt.hardware [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2202.735948] env[62619]: DEBUG nova.virt.hardware [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2202.736126] env[62619]: DEBUG nova.virt.hardware [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2202.736973] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9ace1f-2c79-4510-a56a-cf98d9097c96 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.744866] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2c4aaf-9578-4241-ae65-e60b188181aa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.882037] env[62619]: DEBUG nova.compute.manager [req-57717b51-82ba-4124-9160-d6dbd76c9722 req-42bde918-c738-41f2-af00-54007f06cb20 service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Received event network-vif-plugged-5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2202.882236] env[62619]: DEBUG oslo_concurrency.lockutils [req-57717b51-82ba-4124-9160-d6dbd76c9722 req-42bde918-c738-41f2-af00-54007f06cb20 service nova] Acquiring lock "69201bea-4f3a-45ba-b408-aec1cf28387a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2202.882448] env[62619]: DEBUG oslo_concurrency.lockutils [req-57717b51-82ba-4124-9160-d6dbd76c9722 req-42bde918-c738-41f2-af00-54007f06cb20 service nova] Lock "69201bea-4f3a-45ba-b408-aec1cf28387a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2202.882675] env[62619]: DEBUG oslo_concurrency.lockutils [req-57717b51-82ba-4124-9160-d6dbd76c9722 req-42bde918-c738-41f2-af00-54007f06cb20 service nova] Lock "69201bea-4f3a-45ba-b408-aec1cf28387a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2202.882885] env[62619]: DEBUG nova.compute.manager [req-57717b51-82ba-4124-9160-d6dbd76c9722 
req-42bde918-c738-41f2-af00-54007f06cb20 service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] No waiting events found dispatching network-vif-plugged-5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2202.883080] env[62619]: WARNING nova.compute.manager [req-57717b51-82ba-4124-9160-d6dbd76c9722 req-42bde918-c738-41f2-af00-54007f06cb20 service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Received unexpected event network-vif-plugged-5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6 for instance with vm_state building and task_state spawning. [ 2202.968045] env[62619]: DEBUG nova.network.neutron [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Successfully updated port: 5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2203.474334] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2203.474466] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2203.474623] env[62619]: DEBUG nova.network.neutron [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2204.007381] env[62619]: DEBUG nova.network.neutron [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Instance cache missing network info. 
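
The nova.virt.hardware walk a few records above boils down to this: with no topology constraints from the flavor or image (limits and preferences all 0:0:0, maximums effectively unbounded at 65536), a 1-vCPU guest has exactly one factorisation, 1 socket x 1 core x 1 thread. A toy enumeration that reproduces the "Got 1 possible topologies" result; it only mirrors the numbers in the log, while Nova's _get_possible_cpu_topologies also handles preferences and sorting:

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Every (sockets, cores, threads) factorisation of vcpus that fits
    # under the per-dimension maximums is a candidate topology.
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append(VirtCPUTopology(s, c, t))
    return found

# 1 vCPU under the 65536/65536/65536 limits seen above:
print(possible_topologies(1, 65536, 65536, 65536))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]
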
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2204.129383] env[62619]: DEBUG nova.network.neutron [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Updating instance_info_cache with network_info: [{"id": "5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6", "address": "fa:16:3e:9d:f0:dc", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f2c43cf-7c", "ovs_interfaceid": "5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2204.632112] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2204.632449] env[62619]: DEBUG nova.compute.manager [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Instance network_info: |[{"id": "5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6", "address": "fa:16:3e:9d:f0:dc", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f2c43cf-7c", "ovs_interfaceid": "5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 2204.632866] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:f0:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cebc48c-6a77-46bf-9c12-ac130e4d7d76', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2204.640218] env[62619]: DEBUG oslo.service.loopingcall [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2204.640434] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2204.640658] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44ef5a18-bfbd-4e33-aa98-ac05b617ff2f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.661068] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2204.661068] env[62619]: value = "task-1778833" [ 2204.661068] env[62619]: _type = "Task" [ 2204.661068] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.668563] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778833, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.907555] env[62619]: DEBUG nova.compute.manager [req-f698d9e1-cbea-49bd-9e61-c70f45b47b44 req-545f35fc-f808-4176-845d-e1d74fbc61ba service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Received event network-changed-5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2204.907766] env[62619]: DEBUG nova.compute.manager [req-f698d9e1-cbea-49bd-9e61-c70f45b47b44 req-545f35fc-f808-4176-845d-e1d74fbc61ba service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Refreshing instance network info cache due to event network-changed-5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6. 
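
The "Instance VIF info" dict above is a condensed view of the network_info entry cached two records earlier: the Neutron port id becomes iface_id, the port MAC becomes mac_address, and because the port is NSX-backed the nsx-logical-switch-id is carried as an opaque network reference (vif_model is vmxnet3 here). A rough sketch of that mapping, with field names taken from the dicts printed in the log and the translation simplified relative to the VMware driver's vif handling:

def vif_info_from_network_info(vif, vif_model='vmxnet3'):
    """Condense one cached network_info entry into VMware VIF info (simplified)."""
    details = vif['details']
    return {
        'network_name': vif['network']['bridge'],             # 'br-int'
        'mac_address': vif['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],                                 # Neutron port UUID
        'vif_model': vif_model,
    }

cached_vif = {
    'id': '5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6',
    'address': 'fa:16:3e:9d:f0:dc',
    'network': {'bridge': 'br-int'},
    'details': {'nsx-logical-switch-id': '3cebc48c-6a77-46bf-9c12-ac130e4d7d76'},
}
print(vif_info_from_network_info(cached_vif))
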
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2204.908020] env[62619]: DEBUG oslo_concurrency.lockutils [req-f698d9e1-cbea-49bd-9e61-c70f45b47b44 req-545f35fc-f808-4176-845d-e1d74fbc61ba service nova] Acquiring lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2204.908264] env[62619]: DEBUG oslo_concurrency.lockutils [req-f698d9e1-cbea-49bd-9e61-c70f45b47b44 req-545f35fc-f808-4176-845d-e1d74fbc61ba service nova] Acquired lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2204.908481] env[62619]: DEBUG nova.network.neutron [req-f698d9e1-cbea-49bd-9e61-c70f45b47b44 req-545f35fc-f808-4176-845d-e1d74fbc61ba service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Refreshing network info cache for port 5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2205.172203] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778833, 'name': CreateVM_Task, 'duration_secs': 0.369824} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2205.172573] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2205.173021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2205.173192] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2205.173497] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2205.173730] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98e873fd-67c0-4435-ab1e-08cd2dc16762 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.177963] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2205.177963] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e7b228-bbca-41f8-6a32-7db1a1376c59" [ 2205.177963] env[62619]: _type = "Task" [ 2205.177963] env[62619]: } to complete. 
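
Every vCenter call in this trace follows the same shape: invoke an asynchronous *_Task method (CreateVM_Task here, task-1778833), then poll it until it reports success, logging progress percentages and a final duration_secs. A generic sketch of that wait loop, using only the standard library; the FakeTask class and the poll interval are stand-ins for illustration, not the oslo.vmware implementation:

import time

class FakeTask:
    """Stand-in for a vCenter task handle so the sketch is runnable."""
    def __init__(self, progress_steps):
        self._steps = iter(progress_steps)        # e.g. [0, 100]
    def poll(self):
        progress = next(self._steps)
        return ('success' if progress >= 100 else 'running'), progress

def wait_for_task(task, poll_interval=0.5):
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        print(f'progress is {progress}%')
        if state == 'success':
            return time.monotonic() - start       # the duration_secs in the log
        if state == 'error':
            raise RuntimeError('task failed')
        time.sleep(poll_interval)

duration = wait_for_task(FakeTask([0, 100]))      # mirrors the CreateVM_Task trace above
print(f'completed successfully in {duration:.3f}s')
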
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.184938] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e7b228-bbca-41f8-6a32-7db1a1376c59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.586338] env[62619]: DEBUG nova.network.neutron [req-f698d9e1-cbea-49bd-9e61-c70f45b47b44 req-545f35fc-f808-4176-845d-e1d74fbc61ba service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Updated VIF entry in instance network info cache for port 5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2205.586716] env[62619]: DEBUG nova.network.neutron [req-f698d9e1-cbea-49bd-9e61-c70f45b47b44 req-545f35fc-f808-4176-845d-e1d74fbc61ba service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Updating instance_info_cache with network_info: [{"id": "5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6", "address": "fa:16:3e:9d:f0:dc", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f2c43cf-7c", "ovs_interfaceid": "5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2205.688277] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52e7b228-bbca-41f8-6a32-7db1a1376c59, 'name': SearchDatastore_Task, 'duration_secs': 0.013589} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2205.688534] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2205.688759] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Processing image 27a858d5-7985-4b17-8b01-50adcd8f566c {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2205.688981] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2205.689145] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2205.689326] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2205.689571] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9bf140d3-02b6-4b51-832d-a625f74d4fca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.697715] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2205.697875] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2205.698524] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88eb4ecc-da46-4d4c-a9dd-b42600467298 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.702913] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2205.702913] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52948726-91d9-d407-610d-eb355737d8c1" [ 2205.702913] env[62619]: _type = "Task" [ 2205.702913] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.709657] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52948726-91d9-d407-610d-eb355737d8c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.089386] env[62619]: DEBUG oslo_concurrency.lockutils [req-f698d9e1-cbea-49bd-9e61-c70f45b47b44 req-545f35fc-f808-4176-845d-e1d74fbc61ba service nova] Releasing lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2206.213199] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]52948726-91d9-d407-610d-eb355737d8c1, 'name': SearchDatastore_Task, 'duration_secs': 0.00773} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.213980] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8471590d-bb71-4da3-bf8c-5891c0d64044 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.219529] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2206.219529] env[62619]: value = "session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526e82b4-cabb-56a8-14ba-44891e227e80" [ 2206.219529] env[62619]: _type = "Task" [ 2206.219529] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.226832] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526e82b4-cabb-56a8-14ba-44891e227e80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.730060] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': session[52a8412c-f139-e6e9-4392-b05a5ef0a084]526e82b4-cabb-56a8-14ba-44891e227e80, 'name': SearchDatastore_Task, 'duration_secs': 0.009369} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.730347] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2206.730595] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 69201bea-4f3a-45ba-b408-aec1cf28387a/69201bea-4f3a-45ba-b408-aec1cf28387a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2206.730848] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5493b8bd-07ef-4b2e-857b-5a361b804055 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.737530] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2206.737530] env[62619]: value = "task-1778834" [ 2206.737530] env[62619]: _type = "Task" [ 2206.737530] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.746335] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778834, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.251027] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778834, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.748493] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778834, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584916} completed successfully. 
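
The CopyVirtualDisk_Task above clones the image from the shared cache into a per-instance folder on the same datastore; both endpoints are plain "[datastore] path" strings built from the image UUID and the instance UUID. Two small helpers reproducing the exact paths in that record (the path layout is copied from the log, the helper names are illustrative):

def cached_image_vmdk(datastore, image_id, cache_dir='devstack-image-cache_base'):
    # "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk"
    return f'[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk'

def instance_root_vmdk(datastore, instance_uuid):
    # "[datastore1] <instance>/<instance>.vmdk"
    return f'[{datastore}] {instance_uuid}/{instance_uuid}.vmdk'

image = '27a858d5-7985-4b17-8b01-50adcd8f566c'
instance = '69201bea-4f3a-45ba-b408-aec1cf28387a'
print(cached_image_vmdk('datastore1', image))
print(instance_root_vmdk('datastore1', instance))
# The copy above goes from the first path to the second, so every instance
# boots from its own copy of the cached sparse image.
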
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2207.748759] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/27a858d5-7985-4b17-8b01-50adcd8f566c/27a858d5-7985-4b17-8b01-50adcd8f566c.vmdk to [datastore1] 69201bea-4f3a-45ba-b408-aec1cf28387a/69201bea-4f3a-45ba-b408-aec1cf28387a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2207.748969] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2207.749227] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6dbb7fc0-89bc-4acd-9e71-2dabaa4c886e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.755187] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2207.755187] env[62619]: value = "task-1778835" [ 2207.755187] env[62619]: _type = "Task" [ 2207.755187] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2207.762244] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778835, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.264792] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778835, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077241} completed successfully. 
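
"Extending root virtual disk to 1048576" matches the flavor's root_gb of 1 (m1.nano, from the flavor dump above) expressed in KiB, the unit the extend call appears to work in here; the copied cirros image is smaller than the flavor root disk, so the disk is grown up to the flavor size. The conversion:

root_gb = 1                          # m1.nano root_gb from the flavor above
requested_kib = root_gb * 1024 * 1024
print(requested_kib)                 # 1048576, the value logged by _extend_virtual_disk
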
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.265272] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2208.265782] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d467fb59-ea63-4a30-94fb-3bb4c678eec8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.286835] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 69201bea-4f3a-45ba-b408-aec1cf28387a/69201bea-4f3a-45ba-b408-aec1cf28387a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2208.287076] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4743e1f-a16d-4ce0-8cd5-611ec658f7d8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.305217] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2208.305217] env[62619]: value = "task-1778836" [ 2208.305217] env[62619]: _type = "Task" [ 2208.305217] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.312866] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778836, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.815346] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778836, 'name': ReconfigVM_Task, 'duration_secs': 0.425387} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.815633] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 69201bea-4f3a-45ba-b408-aec1cf28387a/69201bea-4f3a-45ba-b408-aec1cf28387a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2208.816292] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-959038c8-ab47-4f9c-8d16-b2db9632894d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.821995] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2208.821995] env[62619]: value = "task-1778837" [ 2208.821995] env[62619]: _type = "Task" [ 2208.821995] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.829850] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778837, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.332050] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778837, 'name': Rename_Task, 'duration_secs': 0.150469} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.332370] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2209.332588] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfba9772-a9c3-4b08-9d71-a1d69ec332d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.339271] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2209.339271] env[62619]: value = "task-1778838" [ 2209.339271] env[62619]: _type = "Task" [ 2209.339271] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.346455] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778838, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.848682] env[62619]: DEBUG oslo_vmware.api [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778838, 'name': PowerOnVM_Task, 'duration_secs': 0.459013} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.848956] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2209.849144] env[62619]: INFO nova.compute.manager [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Took 7.14 seconds to spawn the instance on the hypervisor. [ 2209.849324] env[62619]: DEBUG nova.compute.manager [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2209.850055] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71bcba99-d51b-447d-a36c-696b94c28733 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.370879] env[62619]: INFO nova.compute.manager [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Took 11.81 seconds to build instance. [ 2210.522609] env[62619]: DEBUG nova.compute.manager [req-0148a581-683d-4799-9e89-fbbcceb6714d req-e14cc205-103d-4a2c-9f66-e4a232622cda service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Received event network-changed-5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2210.522811] env[62619]: DEBUG nova.compute.manager [req-0148a581-683d-4799-9e89-fbbcceb6714d req-e14cc205-103d-4a2c-9f66-e4a232622cda service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Refreshing instance network info cache due to event network-changed-5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6. 
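
The 7.14 s "to spawn the instance" above breaks down against the vCenter task durations logged for this build (task-1778833 through task-1778838 plus the datastore searches). Tallying the duration_secs values copied from the records above gives roughly 2.1 s inside tasks; most of the rest sits between task polls and in the PropertyCollector and Neutron cache calls that are also visible in this trace:

# duration_secs for instance 69201bea-4f3a-45ba-b408-aec1cf28387a, copied from above
task_durations = [
    ('CreateVM_Task',          0.369824),   # task-1778833
    ('SearchDatastore_Task',   0.013589),
    ('SearchDatastore_Task',   0.007730),
    ('SearchDatastore_Task',   0.009369),
    ('CopyVirtualDisk_Task',   0.584916),   # task-1778834
    ('ExtendVirtualDisk_Task', 0.077241),   # task-1778835
    ('ReconfigVM_Task',        0.425387),   # task-1778836
    ('Rename_Task',            0.150469),   # task-1778837
    ('PowerOnVM_Task',         0.459013),   # task-1778838
]

busy = sum(d for _, d in task_durations)
print(f'{busy:.2f}s spent inside vCenter tasks out of 7.14s spawn time')  # ~2.10s
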
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2210.523037] env[62619]: DEBUG oslo_concurrency.lockutils [req-0148a581-683d-4799-9e89-fbbcceb6714d req-e14cc205-103d-4a2c-9f66-e4a232622cda service nova] Acquiring lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2210.523184] env[62619]: DEBUG oslo_concurrency.lockutils [req-0148a581-683d-4799-9e89-fbbcceb6714d req-e14cc205-103d-4a2c-9f66-e4a232622cda service nova] Acquired lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2210.523407] env[62619]: DEBUG nova.network.neutron [req-0148a581-683d-4799-9e89-fbbcceb6714d req-e14cc205-103d-4a2c-9f66-e4a232622cda service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Refreshing network info cache for port 5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2210.873291] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b27bde95-8586-47f2-9003-73e07388bad5 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "69201bea-4f3a-45ba-b408-aec1cf28387a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.319s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2211.238958] env[62619]: DEBUG nova.network.neutron [req-0148a581-683d-4799-9e89-fbbcceb6714d req-e14cc205-103d-4a2c-9f66-e4a232622cda service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Updated VIF entry in instance network info cache for port 5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6. 
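
The network-changed and network-vif-plugged records in this trace all follow one pattern: Neutron posts an external instance event, and the compute manager answers by refreshing the cached network_info for the named port under the per-instance "refresh_cache-<uuid>" lock. A schematic dispatcher for that pattern, written only for illustration; Nova keeps the event name and the port tag as separate fields, the log simply prints them joined as network-changed-<port>:

import threading
from collections import defaultdict

_cache_locks = defaultdict(threading.Lock)    # instance uuid -> refresh_cache lock
network_info_cache = {}                        # instance uuid -> list of VIF dicts

def refresh_port(instance_uuid, port_id):
    # Stand-in for the Neutron round-trip that rebuilds one VIF entry.
    return {'id': port_id, 'active': True}

def handle_external_event(instance_uuid, name, tag):
    if name in ('network-changed', 'network-vif-plugged'):
        with _cache_locks[instance_uuid]:      # "refresh_cache-<instance uuid>"
            vifs = network_info_cache.get(instance_uuid, [])
            refreshed = refresh_port(instance_uuid, tag)
            network_info_cache[instance_uuid] = (
                [v for v in vifs if v['id'] != tag] + [refreshed])

handle_external_event('69201bea-4f3a-45ba-b408-aec1cf28387a',
                      'network-changed',
                      '5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6')
print(network_info_cache)
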
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2211.239349] env[62619]: DEBUG nova.network.neutron [req-0148a581-683d-4799-9e89-fbbcceb6714d req-e14cc205-103d-4a2c-9f66-e4a232622cda service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Updating instance_info_cache with network_info: [{"id": "5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6", "address": "fa:16:3e:9d:f0:dc", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f2c43cf-7c", "ovs_interfaceid": "5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2211.741800] env[62619]: DEBUG oslo_concurrency.lockutils [req-0148a581-683d-4799-9e89-fbbcceb6714d req-e14cc205-103d-4a2c-9f66-e4a232622cda service nova] Releasing lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2230.362463] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2230.362764] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2230.866197] env[62619]: DEBUG nova.compute.utils [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2231.369867] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa" "released" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2232.430537] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2232.430956] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2232.431027] env[62619]: INFO nova.compute.manager [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Attaching volume 08af3e07-1e8a-4e59-8d5a-258ce24dcc3c to /dev/sdb [ 2232.460639] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4aadf8e-ca03-4be2-8f5c-6990673c54cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.468386] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c604ed7-0e7b-497d-916f-d6c8be8027f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.481319] env[62619]: DEBUG nova.virt.block_device [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Updating existing volume attachment record: d402fa35-6dc4-44aa-84da-cffa91839ba7 {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2237.023165] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Volume attach. 
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2237.023450] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369213', 'volume_id': '08af3e07-1e8a-4e59-8d5a-258ce24dcc3c', 'name': 'volume-08af3e07-1e8a-4e59-8d5a-258ce24dcc3c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e165e8ff-4f48-4047-bb29-d77d4e0b40aa', 'attached_at': '', 'detached_at': '', 'volume_id': '08af3e07-1e8a-4e59-8d5a-258ce24dcc3c', 'serial': '08af3e07-1e8a-4e59-8d5a-258ce24dcc3c'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2237.024342] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4decfc30-6699-46ae-8b5c-6e39538bbcb6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.040735] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3cf1d61-36fe-4819-8369-97d2b05c77ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.065430] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] volume-08af3e07-1e8a-4e59-8d5a-258ce24dcc3c/volume-08af3e07-1e8a-4e59-8d5a-258ce24dcc3c.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2237.065711] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80656d5d-9ba1-43fd-bde0-acb548298d0d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.082623] env[62619]: DEBUG oslo_vmware.api [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2237.082623] env[62619]: value = "task-1778841" [ 2237.082623] env[62619]: _type = "Task" [ 2237.082623] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2237.090084] env[62619]: DEBUG oslo_vmware.api [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778841, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2237.592654] env[62619]: DEBUG oslo_vmware.api [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778841, 'name': ReconfigVM_Task, 'duration_secs': 0.295054} completed successfully. 
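The ReconfigVM_Task sequence above (submit the reconfigure call, poll "progress is 5%", then "completed successfully" with a duration_secs) is the polling loop that the wait_for_task/_poll_task lines are reporting on. A simplified stand-alone sketch of such a loop; get_task_state here is a hypothetical callable standing in for the vCenter task lookup, not an oslo.vmware API:

```python
import time

def wait_for_task(get_task_state, poll_interval=0.5, timeout=300.0):
    """Poll a task until it succeeds or errors, in the spirit of the
    _poll_task lines above. get_task_state is a hypothetical callable
    returning one of ('running', progress), ('success', result),
    ('error', message)."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, info = get_task_state()
        if state == "success":
            return info                      # "completed successfully"
        if state == "error":
            raise RuntimeError("task failed: %s" % info)
        time.sleep(poll_interval)            # between "progress is N%" polls
    raise TimeoutError("task did not complete in time")
```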
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2237.592915] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Reconfigured VM instance instance-0000007c to attach disk [datastore1] volume-08af3e07-1e8a-4e59-8d5a-258ce24dcc3c/volume-08af3e07-1e8a-4e59-8d5a-258ce24dcc3c.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2237.597535] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02bd5ab9-d880-48d4-b065-622d1460985c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.611393] env[62619]: DEBUG oslo_vmware.api [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2237.611393] env[62619]: value = "task-1778842" [ 2237.611393] env[62619]: _type = "Task" [ 2237.611393] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2237.619107] env[62619]: DEBUG oslo_vmware.api [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778842, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2238.121513] env[62619]: DEBUG oslo_vmware.api [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778842, 'name': ReconfigVM_Task, 'duration_secs': 0.129045} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2238.121812] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369213', 'volume_id': '08af3e07-1e8a-4e59-8d5a-258ce24dcc3c', 'name': 'volume-08af3e07-1e8a-4e59-8d5a-258ce24dcc3c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e165e8ff-4f48-4047-bb29-d77d4e0b40aa', 'attached_at': '', 'detached_at': '', 'volume_id': '08af3e07-1e8a-4e59-8d5a-258ce24dcc3c', 'serial': '08af3e07-1e8a-4e59-8d5a-258ce24dcc3c'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2239.179796] env[62619]: DEBUG nova.objects.instance [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lazy-loading 'flavor' on Instance uuid e165e8ff-4f48-4047-bb29-d77d4e0b40aa {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2239.686189] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15573c81-0dc3-45cf-a3ea-7c598adf9502 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.255s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2239.868292] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2239.868535] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2240.372275] env[62619]: INFO nova.compute.manager [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Detaching volume 08af3e07-1e8a-4e59-8d5a-258ce24dcc3c [ 2240.401606] env[62619]: INFO nova.virt.block_device [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Attempting to driver detach volume 08af3e07-1e8a-4e59-8d5a-258ce24dcc3c from mountpoint /dev/sdb [ 2240.401843] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] 
[instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Volume detach. Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2240.402041] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369213', 'volume_id': '08af3e07-1e8a-4e59-8d5a-258ce24dcc3c', 'name': 'volume-08af3e07-1e8a-4e59-8d5a-258ce24dcc3c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e165e8ff-4f48-4047-bb29-d77d4e0b40aa', 'attached_at': '', 'detached_at': '', 'volume_id': '08af3e07-1e8a-4e59-8d5a-258ce24dcc3c', 'serial': '08af3e07-1e8a-4e59-8d5a-258ce24dcc3c'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2240.402915] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d28c58b-5c8b-44a1-8666-4222450f3602 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.426822] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb3a4e7-b5a0-4812-9e2d-eb3d8c01496e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.433711] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d0de8b-f467-4aba-be12-a02d22a26aec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.453412] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef7368f-0a17-455d-af5c-35352b115df8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.467698] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] The volume has not been displaced from its original location: [datastore1] volume-08af3e07-1e8a-4e59-8d5a-258ce24dcc3c/volume-08af3e07-1e8a-4e59-8d5a-258ce24dcc3c.vmdk. No consolidation needed. 
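Before reconfiguring the VM, the detach path above checks whether the volume's backing file was displaced from its original location (for example by a storage migration) and concludes that no consolidation is needed. Reduced to the decision the log records, the check is a datastore-path comparison; the sketch below mirrors only that logged decision, not the full driver logic:

```python
def volume_displaced(original_path, current_backing_path):
    """True only if the attached disk no longer points at the volume's
    original VMDK; mirrors the decision logged above."""
    return original_path != current_backing_path

# Example with the path from the log: unchanged, so no consolidation needed.
orig = ("[datastore1] volume-08af3e07-1e8a-4e59-8d5a-258ce24dcc3c/"
        "volume-08af3e07-1e8a-4e59-8d5a-258ce24dcc3c.vmdk")
print(volume_displaced(orig, orig))  # False -> "No consolidation needed."
```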
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2240.472803] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Reconfiguring VM instance instance-0000007c to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2240.473095] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a393a0f0-208e-4062-878a-fb69d9ef4ee6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.490856] env[62619]: DEBUG oslo_vmware.api [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2240.490856] env[62619]: value = "task-1778843" [ 2240.490856] env[62619]: _type = "Task" [ 2240.490856] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2240.498450] env[62619]: DEBUG oslo_vmware.api [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778843, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2240.708581] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2240.708820] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 2241.000380] env[62619]: DEBUG oslo_vmware.api [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778843, 'name': ReconfigVM_Task, 'duration_secs': 0.186262} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2241.000615] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Reconfigured VM instance instance-0000007c to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2241.005165] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24418e96-0e33-45b4-8cb3-d96c49be8697 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.020283] env[62619]: DEBUG oslo_vmware.api [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2241.020283] env[62619]: value = "task-1778844" [ 2241.020283] env[62619]: _type = "Task" [ 2241.020283] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2241.027665] env[62619]: DEBUG oslo_vmware.api [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778844, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2241.529644] env[62619]: DEBUG oslo_vmware.api [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778844, 'name': ReconfigVM_Task, 'duration_secs': 0.13527} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2241.529965] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369213', 'volume_id': '08af3e07-1e8a-4e59-8d5a-258ce24dcc3c', 'name': 'volume-08af3e07-1e8a-4e59-8d5a-258ce24dcc3c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e165e8ff-4f48-4047-bb29-d77d4e0b40aa', 'attached_at': '', 'detached_at': '', 'volume_id': '08af3e07-1e8a-4e59-8d5a-258ce24dcc3c', 'serial': '08af3e07-1e8a-4e59-8d5a-258ce24dcc3c'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2242.068773] env[62619]: DEBUG nova.objects.instance [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lazy-loading 'flavor' on Instance uuid e165e8ff-4f48-4047-bb29-d77d4e0b40aa {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2243.076727] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cecc16d0-1570-4fef-bf72-dd7a31ee6015 tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.208s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2244.099051] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2244.099051] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2244.099051] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2244.099051] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2244.099051] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2244.101108] env[62619]: INFO nova.compute.manager [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Terminating instance [ 2244.605250] env[62619]: DEBUG nova.compute.manager [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2244.605499] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2244.606394] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cebb4e1-17a1-4f68-ab48-fd8ffe41143c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.614479] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2244.614699] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0878aaf-0195-4924-96a9-dc5e1522717f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.620349] env[62619]: DEBUG oslo_vmware.api [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2244.620349] env[62619]: value = "task-1778845" [ 2244.620349] env[62619]: _type = "Task" [ 2244.620349] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.628102] env[62619]: DEBUG oslo_vmware.api [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778845, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.129920] env[62619]: DEBUG oslo_vmware.api [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778845, 'name': PowerOffVM_Task, 'duration_secs': 0.168} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.130352] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2245.130398] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2245.130651] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-47b9c0e1-2141-4c0d-9a87-bd12a6229767 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.203686] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2245.203872] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2245.204049] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Deleting the datastore file [datastore1] e165e8ff-4f48-4047-bb29-d77d4e0b40aa {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2245.204396] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93b322dc-9feb-4d07-919e-e7ba26428581 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.211465] env[62619]: DEBUG oslo_vmware.api [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for the task: (returnval){ [ 2245.211465] env[62619]: value = "task-1778847" [ 2245.211465] env[62619]: _type = "Task" [ 2245.211465] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.219178] env[62619]: DEBUG oslo_vmware.api [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778847, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.721537] env[62619]: DEBUG oslo_vmware.api [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Task: {'id': task-1778847, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.238406} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.721799] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2245.721981] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2245.722200] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2245.722380] env[62619]: INFO nova.compute.manager [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2245.722617] env[62619]: DEBUG oslo.service.loopingcall [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
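Network deallocation for the destroyed instance is wrapped in a retry helper; the loopingcall entry above only shows oslo.service waiting for _deallocate_network_with_retries to return. A very rough sketch of such a retry wrapper built on oslo.service's FixedIntervalLoopingCall, with the retried exception type and the retry policy left generic (this is not Nova's actual helper, which also caps the number of attempts):

```python
from oslo_service import loopingcall

def deallocate_with_retries(deallocate, interval=2.0):
    """Rough sketch of a loopingcall-based retry wrapper; not Nova's
    actual helper, which also bounds the number of attempts."""

    def _attempt():
        try:
            deallocate()
        except Exception:
            # A real implementation retries only specific errors.
            return                       # keep looping -> retry next tick
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_attempt)
    timer.start(interval=interval).wait()
```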
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2245.722809] env[62619]: DEBUG nova.compute.manager [-] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2245.722900] env[62619]: DEBUG nova.network.neutron [-] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2246.201795] env[62619]: DEBUG nova.compute.manager [req-46638cd2-bf7b-4bcd-8039-284f55784fe9 req-6549ea71-5484-413e-b8fe-6776225c8b52 service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Received event network-vif-deleted-13b4d2ff-6fc8-4ea9-9853-4fa6022969a8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2246.202083] env[62619]: INFO nova.compute.manager [req-46638cd2-bf7b-4bcd-8039-284f55784fe9 req-6549ea71-5484-413e-b8fe-6776225c8b52 service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Neutron deleted interface 13b4d2ff-6fc8-4ea9-9853-4fa6022969a8; detaching it from the instance and deleting it from the info cache [ 2246.202221] env[62619]: DEBUG nova.network.neutron [req-46638cd2-bf7b-4bcd-8039-284f55784fe9 req-6549ea71-5484-413e-b8fe-6776225c8b52 service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2246.642130] env[62619]: DEBUG nova.network.neutron [-] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2246.705906] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8377a012-eb5c-477c-abc0-7c61c4340494 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.709585] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2246.709741] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 2246.709857] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 2246.715836] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56fd38e7-f9b5-4eab-9b31-c8b560eb052f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.741634] env[62619]: DEBUG nova.compute.manager [req-46638cd2-bf7b-4bcd-8039-284f55784fe9 req-6549ea71-5484-413e-b8fe-6776225c8b52 service nova] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Detach interface failed, port_id=13b4d2ff-6fc8-4ea9-9853-4fa6022969a8, reason: Instance e165e8ff-4f48-4047-bb29-d77d4e0b40aa could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2247.144817] env[62619]: INFO nova.compute.manager [-] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Took 1.42 seconds to deallocate network for instance. [ 2247.213409] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: e165e8ff-4f48-4047-bb29-d77d4e0b40aa] Skipping network cache update for instance because it is being deleted. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10326}} [ 2247.268313] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2247.268457] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquired lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2247.268611] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Forcefully refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2247.268755] env[62619]: DEBUG nova.objects.instance [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lazy-loading 'info_cache' on Instance uuid 69201bea-4f3a-45ba-b408-aec1cf28387a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2247.651654] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2247.651924] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2247.652208] env[62619]: DEBUG nova.objects.instance [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lazy-loading 'resources' on Instance uuid e165e8ff-4f48-4047-bb29-d77d4e0b40aa {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2247.969505] env[62619]: DEBUG oslo_concurrency.lockutils [None req-131fc37a-5c3b-4545-9d28-1708ed7c0e4b tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "69201bea-4f3a-45ba-b408-aec1cf28387a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2247.969804] env[62619]: DEBUG oslo_concurrency.lockutils [None req-131fc37a-5c3b-4545-9d28-1708ed7c0e4b tempest-ServerActionsTestJSON-347979293 
tempest-ServerActionsTestJSON-347979293-project-member] Lock "69201bea-4f3a-45ba-b408-aec1cf28387a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2247.969998] env[62619]: DEBUG nova.compute.manager [None req-131fc37a-5c3b-4545-9d28-1708ed7c0e4b tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2247.970936] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c61580-1ff3-417a-83a0-d03e1ca81f22 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.977996] env[62619]: DEBUG nova.compute.manager [None req-131fc37a-5c3b-4545-9d28-1708ed7c0e4b tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2247.978555] env[62619]: DEBUG nova.objects.instance [None req-131fc37a-5c3b-4545-9d28-1708ed7c0e4b tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lazy-loading 'flavor' on Instance uuid 69201bea-4f3a-45ba-b408-aec1cf28387a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2248.172507] env[62619]: DEBUG nova.scheduler.client.report [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Refreshing inventories for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2248.186715] env[62619]: DEBUG nova.scheduler.client.report [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Updating ProviderTree inventory for provider e814b747-ed75-487b-a97d-acf66bc6db0b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2248.186937] env[62619]: DEBUG nova.compute.provider_tree [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Updating inventory in ProviderTree for provider e814b747-ed75-487b-a97d-acf66bc6db0b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} 
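The inventory refresh above reports the provider's raw totals together with reserved amounts and allocation ratios; the capacity Placement derives from such an inventory is (total - reserved) * allocation_ratio per resource class. Worked through with the logged numbers:

```python
# Capacity implied by the inventory logged above:
# capacity = (total - reserved) * allocation_ratio
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```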
{{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2248.197118] env[62619]: DEBUG nova.scheduler.client.report [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Refreshing aggregate associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2248.214042] env[62619]: DEBUG nova.scheduler.client.report [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Refreshing trait associations for resource provider e814b747-ed75-487b-a97d-acf66bc6db0b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2248.247292] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6651d47b-9f31-4934-a6fe-ee0883f8a36e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.255530] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83190b01-370c-46a8-bdb8-3ecff6a64a8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.287394] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5752370-a8e0-4ee0-bb29-e100d08f155e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.295025] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bd8ab4-5944-41ff-ace4-b1abaeccdc2d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.308725] env[62619]: DEBUG nova.compute.provider_tree [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2248.812592] env[62619]: DEBUG nova.scheduler.client.report [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2248.985033] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-131fc37a-5c3b-4545-9d28-1708ed7c0e4b tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Powering off the VM 
{{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2248.985318] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca944070-4ea1-4209-8bfe-893265f76701 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.993200] env[62619]: DEBUG oslo_vmware.api [None req-131fc37a-5c3b-4545-9d28-1708ed7c0e4b tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2248.993200] env[62619]: value = "task-1778848" [ 2248.993200] env[62619]: _type = "Task" [ 2248.993200] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2249.000572] env[62619]: DEBUG oslo_vmware.api [None req-131fc37a-5c3b-4545-9d28-1708ed7c0e4b tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778848, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.005459] env[62619]: DEBUG nova.network.neutron [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Updating instance_info_cache with network_info: [{"id": "5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6", "address": "fa:16:3e:9d:f0:dc", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f2c43cf-7c", "ovs_interfaceid": "5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2249.317845] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.666s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2249.338681] env[62619]: INFO nova.scheduler.client.report [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 tempest-AttachVolumeNegativeTest-1686870837-project-member] Deleted allocations for instance e165e8ff-4f48-4047-bb29-d77d4e0b40aa [ 2249.503193] env[62619]: DEBUG oslo_vmware.api [None req-131fc37a-5c3b-4545-9d28-1708ed7c0e4b 
tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778848, 'name': PowerOffVM_Task, 'duration_secs': 0.206534} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.503464] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-131fc37a-5c3b-4545-9d28-1708ed7c0e4b tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2249.503652] env[62619]: DEBUG nova.compute.manager [None req-131fc37a-5c3b-4545-9d28-1708ed7c0e4b tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2249.504388] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3e66b0-b7f7-4c6c-af5f-41089eceeca9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.508296] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Releasing lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2249.508461] env[62619]: DEBUG nova.compute.manager [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 2249.509938] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2249.511945] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2249.512138] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2249.512330] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2249.708825] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2249.845642] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0359ebce-1e2f-4c0f-91fc-1e36a3bb547e tempest-AttachVolumeNegativeTest-1686870837 
tempest-AttachVolumeNegativeTest-1686870837-project-member] Lock "e165e8ff-4f48-4047-bb29-d77d4e0b40aa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.747s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2250.017037] env[62619]: DEBUG oslo_concurrency.lockutils [None req-131fc37a-5c3b-4545-9d28-1708ed7c0e4b tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "69201bea-4f3a-45ba-b408-aec1cf28387a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.047s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2250.211312] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2250.211591] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2250.211713] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2250.211866] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2250.212817] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d5e5f7-a218-4038-94e3-834aa2d6f01c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.222288] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b8f88c-b623-45d2-98f7-d65e8819cae6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.235409] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0855f5e-dd5b-4dab-aa4d-0ab3f247349f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.241380] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6242aa-5f7d-4932-9735-57519e3f4660 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.269603] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181244MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2250.269741] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2250.269937] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2250.314969] env[62619]: DEBUG nova.objects.instance [None req-398cd48c-b96d-499b-9e86-dd1c03c15777 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lazy-loading 'flavor' on Instance uuid 69201bea-4f3a-45ba-b408-aec1cf28387a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2250.819108] env[62619]: DEBUG oslo_concurrency.lockutils [None req-398cd48c-b96d-499b-9e86-dd1c03c15777 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2250.819515] env[62619]: DEBUG oslo_concurrency.lockutils [None req-398cd48c-b96d-499b-9e86-dd1c03c15777 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2250.819515] env[62619]: DEBUG nova.network.neutron [None req-398cd48c-b96d-499b-9e86-dd1c03c15777 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2250.819699] env[62619]: DEBUG nova.objects.instance [None req-398cd48c-b96d-499b-9e86-dd1c03c15777 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lazy-loading 'info_cache' on Instance uuid 69201bea-4f3a-45ba-b408-aec1cf28387a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2251.295742] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Instance 69201bea-4f3a-45ba-b408-aec1cf28387a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
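The audit above finds a single actively managed instance (69201bea-...) holding {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}. The used_ram=704MB in the "Final resource view" entry that follows is consistent with the 512MB MEMORY_MB reservation from the inventory plus that instance's 192MB allocation:

```python
# Arithmetic behind the "Final resource view" entry that follows.
reserved_host_memory_mb = 512   # MEMORY_MB 'reserved' in the inventory above
instance_memory_mb = 192        # the remaining instance's allocation
instance_disk_gb = 1
instance_vcpus = 1

used_ram_mb = reserved_host_memory_mb + instance_memory_mb
print(used_ram_mb)              # 704 -> "used_ram=704MB"
print(instance_disk_gb, instance_vcpus)   # used_disk=1GB, used_vcpus=1
```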
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2251.295999] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2251.296165] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2251.322337] env[62619]: DEBUG nova.objects.base [None req-398cd48c-b96d-499b-9e86-dd1c03c15777 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Object Instance<69201bea-4f3a-45ba-b408-aec1cf28387a> lazy-loaded attributes: flavor,info_cache {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2251.324803] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddacf490-85b7-4fec-9516-ba73a4322a49 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.332958] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1042aa-d0b1-4979-8a45-f94f10180e7a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.363200] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a882f4a7-dd0b-4940-b13d-a5ff64f3a883 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.369711] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df5dcdf-8c86-4629-aed9-438ec63d39c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.382585] env[62619]: DEBUG nova.compute.provider_tree [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2251.885775] env[62619]: DEBUG nova.scheduler.client.report [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2252.042431] env[62619]: DEBUG nova.network.neutron [None req-398cd48c-b96d-499b-9e86-dd1c03c15777 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Updating instance_info_cache with network_info: [{"id": "5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6", "address": "fa:16:3e:9d:f0:dc", 
"network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f2c43cf-7c", "ovs_interfaceid": "5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2252.390306] env[62619]: DEBUG nova.compute.resource_tracker [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2252.390504] env[62619]: DEBUG oslo_concurrency.lockutils [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.121s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2252.545513] env[62619]: DEBUG oslo_concurrency.lockutils [None req-398cd48c-b96d-499b-9e86-dd1c03c15777 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2253.390764] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2253.552311] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-398cd48c-b96d-499b-9e86-dd1c03c15777 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2253.552652] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7dbb5081-5575-44d7-8d95-6c3c3753d25e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.564588] env[62619]: DEBUG oslo_vmware.api [None req-398cd48c-b96d-499b-9e86-dd1c03c15777 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2253.564588] env[62619]: value = "task-1778850" [ 2253.564588] env[62619]: _type = "Task" [ 
2253.564588] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2253.575643] env[62619]: DEBUG oslo_vmware.api [None req-398cd48c-b96d-499b-9e86-dd1c03c15777 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778850, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2253.896271] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2254.074831] env[62619]: DEBUG oslo_vmware.api [None req-398cd48c-b96d-499b-9e86-dd1c03c15777 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778850, 'name': PowerOnVM_Task, 'duration_secs': 0.357679} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2254.075192] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-398cd48c-b96d-499b-9e86-dd1c03c15777 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2254.075448] env[62619]: DEBUG nova.compute.manager [None req-398cd48c-b96d-499b-9e86-dd1c03c15777 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2254.076221] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f673630-96d5-4f75-a907-55414d90b91f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.210445] env[62619]: DEBUG oslo_service.periodic_task [None req-27d4d035-30c2-4330-ae5b-e0e3b103ab5b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2255.263521] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d544b4-522d-43a5-9114-a700bf9d0e9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.270292] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6b279f5c-2dcb-4100-9493-dcd606a9e2b9 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Suspending the VM {{(pid=62619) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2255.270508] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-6c40d084-aea2-42c7-8550-1a12d8b5c31b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.276669] env[62619]: DEBUG oslo_vmware.api [None req-6b279f5c-2dcb-4100-9493-dcd606a9e2b9 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the 
task: (returnval){ [ 2255.276669] env[62619]: value = "task-1778852" [ 2255.276669] env[62619]: _type = "Task" [ 2255.276669] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2255.284880] env[62619]: DEBUG oslo_vmware.api [None req-6b279f5c-2dcb-4100-9493-dcd606a9e2b9 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778852, 'name': SuspendVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2255.786783] env[62619]: DEBUG oslo_vmware.api [None req-6b279f5c-2dcb-4100-9493-dcd606a9e2b9 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778852, 'name': SuspendVM_Task} progress is 75%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2256.287663] env[62619]: DEBUG oslo_vmware.api [None req-6b279f5c-2dcb-4100-9493-dcd606a9e2b9 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778852, 'name': SuspendVM_Task, 'duration_secs': 0.650443} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2256.288095] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6b279f5c-2dcb-4100-9493-dcd606a9e2b9 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Suspended the VM {{(pid=62619) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2256.288095] env[62619]: DEBUG nova.compute.manager [None req-6b279f5c-2dcb-4100-9493-dcd606a9e2b9 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2256.288821] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5c6b0b-c822-4ed4-ba1c-134c6b11a3c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.624742] env[62619]: INFO nova.compute.manager [None req-76d81e14-1fb2-41ee-88e3-f8774eacbdaa tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Resuming [ 2257.625409] env[62619]: DEBUG nova.objects.instance [None req-76d81e14-1fb2-41ee-88e3-f8774eacbdaa tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lazy-loading 'flavor' on Instance uuid 69201bea-4f3a-45ba-b408-aec1cf28387a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2259.135915] env[62619]: DEBUG oslo_concurrency.lockutils [None req-76d81e14-1fb2-41ee-88e3-f8774eacbdaa tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2259.136170] env[62619]: DEBUG oslo_concurrency.lockutils [None req-76d81e14-1fb2-41ee-88e3-f8774eacbdaa tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquired lock 
"refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2259.136317] env[62619]: DEBUG nova.network.neutron [None req-76d81e14-1fb2-41ee-88e3-f8774eacbdaa tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2259.839282] env[62619]: DEBUG nova.network.neutron [None req-76d81e14-1fb2-41ee-88e3-f8774eacbdaa tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Updating instance_info_cache with network_info: [{"id": "5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6", "address": "fa:16:3e:9d:f0:dc", "network": {"id": "f9bb9a12-51df-4bba-b71f-5b0fac6734fd", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-940473749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2c7470712c14fa9bc1804ae2431107b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f2c43cf-7c", "ovs_interfaceid": "5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2260.342365] env[62619]: DEBUG oslo_concurrency.lockutils [None req-76d81e14-1fb2-41ee-88e3-f8774eacbdaa tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Releasing lock "refresh_cache-69201bea-4f3a-45ba-b408-aec1cf28387a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2260.343357] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6bb37a5-cd77-462c-abfd-98a3b7540fef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.350302] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-76d81e14-1fb2-41ee-88e3-f8774eacbdaa tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Resuming the VM {{(pid=62619) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2260.350517] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d468089e-1ad1-4947-aec0-bf375070a753 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.356743] env[62619]: DEBUG oslo_vmware.api [None req-76d81e14-1fb2-41ee-88e3-f8774eacbdaa 
tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2260.356743] env[62619]: value = "task-1778854" [ 2260.356743] env[62619]: _type = "Task" [ 2260.356743] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2260.364265] env[62619]: DEBUG oslo_vmware.api [None req-76d81e14-1fb2-41ee-88e3-f8774eacbdaa tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778854, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2260.867020] env[62619]: DEBUG oslo_vmware.api [None req-76d81e14-1fb2-41ee-88e3-f8774eacbdaa tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778854, 'name': PowerOnVM_Task} progress is 93%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2261.368280] env[62619]: DEBUG oslo_vmware.api [None req-76d81e14-1fb2-41ee-88e3-f8774eacbdaa tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778854, 'name': PowerOnVM_Task, 'duration_secs': 0.647335} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2261.368660] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-76d81e14-1fb2-41ee-88e3-f8774eacbdaa tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Resumed the VM {{(pid=62619) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2261.368751] env[62619]: DEBUG nova.compute.manager [None req-76d81e14-1fb2-41ee-88e3-f8774eacbdaa tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2261.369463] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02726425-3920-4224-8d5f-2819fe4302e2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.316371] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "69201bea-4f3a-45ba-b408-aec1cf28387a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2262.316648] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "69201bea-4f3a-45ba-b408-aec1cf28387a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2262.316872] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring 
lock "69201bea-4f3a-45ba-b408-aec1cf28387a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2262.317075] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "69201bea-4f3a-45ba-b408-aec1cf28387a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2262.317249] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "69201bea-4f3a-45ba-b408-aec1cf28387a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2262.319374] env[62619]: INFO nova.compute.manager [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Terminating instance [ 2262.824043] env[62619]: DEBUG nova.compute.manager [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2262.824043] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2262.825026] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ceb8ae-4304-4bab-ad80-60dae439d77d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.832914] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2262.833159] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-551c733b-1f8b-4427-a978-2921d67f6c42 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.839016] env[62619]: DEBUG oslo_vmware.api [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2262.839016] env[62619]: value = "task-1778855" [ 2262.839016] env[62619]: _type = "Task" [ 2262.839016] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2262.847339] env[62619]: DEBUG oslo_vmware.api [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778855, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.349871] env[62619]: DEBUG oslo_vmware.api [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778855, 'name': PowerOffVM_Task, 'duration_secs': 0.240679} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2263.350123] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2263.350266] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2263.350516] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53c0ac2e-66b6-4ee2-b68b-dce237011bd9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.796959] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2263.797221] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2263.797382] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Deleting the datastore file [datastore1] 69201bea-4f3a-45ba-b408-aec1cf28387a {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2263.797663] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34c2bdc1-4b2a-4a7a-ba16-020af9d37329 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.804482] env[62619]: DEBUG oslo_vmware.api [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for the task: (returnval){ [ 2263.804482] env[62619]: value = "task-1778857" [ 2263.804482] env[62619]: _type = "Task" [ 2263.804482] 
env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2263.811568] env[62619]: DEBUG oslo_vmware.api [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778857, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.314415] env[62619]: DEBUG oslo_vmware.api [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Task: {'id': task-1778857, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141172} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2264.314783] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2264.314783] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2264.314949] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2264.315140] env[62619]: INFO nova.compute.manager [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Took 1.49 seconds to destroy the instance on the hypervisor. [ 2264.315378] env[62619]: DEBUG oslo.service.loopingcall [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2264.315563] env[62619]: DEBUG nova.compute.manager [-] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2264.315657] env[62619]: DEBUG nova.network.neutron [-] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2264.745268] env[62619]: DEBUG nova.compute.manager [req-9eb75e33-ab61-4e28-838f-dfdb6766986a req-5ffa0492-4bc1-4933-9eac-f21ef1d11a70 service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Received event network-vif-deleted-5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2264.745268] env[62619]: INFO nova.compute.manager [req-9eb75e33-ab61-4e28-838f-dfdb6766986a req-5ffa0492-4bc1-4933-9eac-f21ef1d11a70 service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Neutron deleted interface 5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6; detaching it from the instance and deleting it from the info cache [ 2264.745268] env[62619]: DEBUG nova.network.neutron [req-9eb75e33-ab61-4e28-838f-dfdb6766986a req-5ffa0492-4bc1-4933-9eac-f21ef1d11a70 service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2265.221874] env[62619]: DEBUG nova.network.neutron [-] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2265.247483] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8cfab01-3038-4388-9998-62d0c861b2fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.257694] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0227f505-50a9-4d3b-98d5-779dcaf8d436 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.281664] env[62619]: DEBUG nova.compute.manager [req-9eb75e33-ab61-4e28-838f-dfdb6766986a req-5ffa0492-4bc1-4933-9eac-f21ef1d11a70 service nova] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Detach interface failed, port_id=5f2c43cf-7cbb-40d9-9cc6-8b21ac9183d6, reason: Instance 69201bea-4f3a-45ba-b408-aec1cf28387a could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2265.725413] env[62619]: INFO nova.compute.manager [-] [instance: 69201bea-4f3a-45ba-b408-aec1cf28387a] Took 1.41 seconds to deallocate network for instance. 
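The records above tear the instance down by polling vCenter tasks (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) until each reports completion. Below is a minimal, self-contained sketch of that poll-until-done pattern; it is not the oslo.vmware implementation, and FakeTask, poll and wait_for_task here are hypothetical stand-ins for a vCenter task handle and the library's task waiter.

import time

class FakeTask:
    """Stand-in for a vCenter task reference such as task-1778855."""

    def __init__(self, name):
        self.name = name
        self._progress = 0

    def poll(self):
        # A real client would read TaskInfo via the PropertyCollector;
        # here we just advance a counter until a terminal state is reached.
        self._progress = min(self._progress + 50, 100)
        state = "success" if self._progress == 100 else "running"
        return state, self._progress

def wait_for_task(task, poll_interval=0.5):
    """Block until the task reaches a terminal state, logging progress."""
    while True:
        state, progress = task.poll()
        print(f"Task: {task.name} progress is {progress}%.")
        if state == "success":
            print(f"Task: {task.name} completed successfully.")
            return
        if state == "error":
            raise RuntimeError(f"task {task.name} failed")
        time.sleep(poll_interval)

if __name__ == "__main__":
    wait_for_task(FakeTask("PowerOffVM_Task"))

A fixed poll interval matches the cadence visible in the trace, where each task is re-checked roughly every half second until vCenter reports success.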
[ 2266.236048] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2266.236048] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2266.236048] env[62619]: DEBUG nova.objects.instance [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lazy-loading 'resources' on Instance uuid 69201bea-4f3a-45ba-b408-aec1cf28387a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2266.769091] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30627089-57fe-4813-b242-1effac84466e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.777137] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca869ad-a479-4824-abbe-c2eb6d73bfb4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.807971] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3d1b7c-d87a-4d99-90ed-b130ee3e28f2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.814916] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231e1df5-110a-4612-9f1c-e8e6cb853072 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.828145] env[62619]: DEBUG nova.compute.provider_tree [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed in ProviderTree for provider: e814b747-ed75-487b-a97d-acf66bc6db0b {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2267.331044] env[62619]: DEBUG nova.scheduler.client.report [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Inventory has not changed for provider e814b747-ed75-487b-a97d-acf66bc6db0b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2267.836873] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 
tempest-ServerActionsTestJSON-347979293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.601s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2267.855674] env[62619]: INFO nova.scheduler.client.report [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Deleted allocations for instance 69201bea-4f3a-45ba-b408-aec1cf28387a [ 2268.363462] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fc6701d-a1f0-47f4-b0a2-7088b1d73302 tempest-ServerActionsTestJSON-347979293 tempest-ServerActionsTestJSON-347979293-project-member] Lock "69201bea-4f3a-45ba-b408-aec1cf28387a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.047s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
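The inventory dict the report client logs at 2267.331044 is what bounds scheduling on this node: placement treats (total - reserved) * allocation_ratio as the usable capacity per resource class. The short illustrative calculation below uses the values copied from that record; it is a sketch of the arithmetic, not code from nova or placement.

# Inventory values copied from the report-client record at 2267.331044.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    # Usable capacity per resource class as placement computes it.
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity {capacity:g}")

# Expected output:
# VCPU: capacity 192
# MEMORY_MB: capacity 196078
# DISK_GB: capacity 400

With allocation_ratio 4.0, the 48 physical vCPUs in the resource view above back up to 192 VCPU allocations, which is why the single 1-VCPU guest tracked earlier leaves the provider essentially empty.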